{
"best_metric": 0.7813047170639038,
"best_model_checkpoint": "model/checkpoints/run1-python-codegen/checkpoint-10000",
"epoch": 4.999356582164458,
"eval_steps": 1000,
"global_step": 19425,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0025736713421696047,
"grad_norm": 1.7038387060165405,
"learning_rate": 4.997425997425998e-05,
"loss": 0.9335,
"step": 10
},
{
"epoch": 0.0051473426843392095,
"grad_norm": 1.1648929119110107,
"learning_rate": 4.994851994851995e-05,
"loss": 0.9433,
"step": 20
},
{
"epoch": 0.007721014026508815,
"grad_norm": 3.9555516242980957,
"learning_rate": 4.992277992277993e-05,
"loss": 0.9713,
"step": 30
},
{
"epoch": 0.010294685368678419,
"grad_norm": 0.9799448847770691,
"learning_rate": 4.9897039897039896e-05,
"loss": 0.9518,
"step": 40
},
{
"epoch": 0.012868356710848025,
"grad_norm": 1.26755952835083,
"learning_rate": 4.9871299871299874e-05,
"loss": 1.0754,
"step": 50
},
{
"epoch": 0.01544202805301763,
"grad_norm": 4.542232513427734,
"learning_rate": 4.9845559845559846e-05,
"loss": 0.9471,
"step": 60
},
{
"epoch": 0.018015699395187233,
"grad_norm": 1.2068519592285156,
"learning_rate": 4.9819819819819824e-05,
"loss": 0.9576,
"step": 70
},
{
"epoch": 0.020589370737356838,
"grad_norm": 1.019250750541687,
"learning_rate": 4.9794079794079796e-05,
"loss": 0.9516,
"step": 80
},
{
"epoch": 0.023163042079526446,
"grad_norm": 1.2534639835357666,
"learning_rate": 4.9768339768339774e-05,
"loss": 1.0046,
"step": 90
},
{
"epoch": 0.02573671342169605,
"grad_norm": 0.8234790563583374,
"learning_rate": 4.9742599742599746e-05,
"loss": 0.8847,
"step": 100
},
{
"epoch": 0.028310384763865656,
"grad_norm": 0.9790653586387634,
"learning_rate": 4.971685971685972e-05,
"loss": 0.8495,
"step": 110
},
{
"epoch": 0.03088405610603526,
"grad_norm": 4.180076599121094,
"learning_rate": 4.969111969111969e-05,
"loss": 0.8801,
"step": 120
},
{
"epoch": 0.03345772744820486,
"grad_norm": 0.9933150410652161,
"learning_rate": 4.966537966537967e-05,
"loss": 0.9608,
"step": 130
},
{
"epoch": 0.036031398790374466,
"grad_norm": 1.1884509325027466,
"learning_rate": 4.963963963963964e-05,
"loss": 0.9133,
"step": 140
},
{
"epoch": 0.03860507013254407,
"grad_norm": 4.013946056365967,
"learning_rate": 4.961389961389962e-05,
"loss": 0.9142,
"step": 150
},
{
"epoch": 0.041178741474713676,
"grad_norm": 2.0833263397216797,
"learning_rate": 4.958815958815959e-05,
"loss": 0.9864,
"step": 160
},
{
"epoch": 0.04375241281688328,
"grad_norm": 3.602749824523926,
"learning_rate": 4.956241956241957e-05,
"loss": 0.876,
"step": 170
},
{
"epoch": 0.04632608415905289,
"grad_norm": 1.2331393957138062,
"learning_rate": 4.953667953667954e-05,
"loss": 0.9233,
"step": 180
},
{
"epoch": 0.0488997555012225,
"grad_norm": 1.0604982376098633,
"learning_rate": 4.951093951093951e-05,
"loss": 0.9497,
"step": 190
},
{
"epoch": 0.0514734268433921,
"grad_norm": 1.0822978019714355,
"learning_rate": 4.948519948519948e-05,
"loss": 0.8719,
"step": 200
},
{
"epoch": 0.05404709818556171,
"grad_norm": 0.9496659636497498,
"learning_rate": 4.945945945945946e-05,
"loss": 0.9186,
"step": 210
},
{
"epoch": 0.05662076952773131,
"grad_norm": 1.0661075115203857,
"learning_rate": 4.943371943371944e-05,
"loss": 0.8779,
"step": 220
},
{
"epoch": 0.059194440869900916,
"grad_norm": 1.1287846565246582,
"learning_rate": 4.940797940797941e-05,
"loss": 0.8501,
"step": 230
},
{
"epoch": 0.06176811221207052,
"grad_norm": 1.16519296169281,
"learning_rate": 4.938223938223939e-05,
"loss": 0.9317,
"step": 240
},
{
"epoch": 0.06434178355424013,
"grad_norm": 1.5323387384414673,
"learning_rate": 4.935649935649936e-05,
"loss": 0.9511,
"step": 250
},
{
"epoch": 0.06691545489640972,
"grad_norm": 1.1095296144485474,
"learning_rate": 4.933075933075933e-05,
"loss": 0.9372,
"step": 260
},
{
"epoch": 0.06948912623857934,
"grad_norm": 1.1270581483840942,
"learning_rate": 4.9305019305019304e-05,
"loss": 0.9549,
"step": 270
},
{
"epoch": 0.07206279758074893,
"grad_norm": 1.0347909927368164,
"learning_rate": 4.927927927927928e-05,
"loss": 0.882,
"step": 280
},
{
"epoch": 0.07463646892291854,
"grad_norm": 0.9273091554641724,
"learning_rate": 4.9253539253539254e-05,
"loss": 0.9191,
"step": 290
},
{
"epoch": 0.07721014026508814,
"grad_norm": 0.9906871318817139,
"learning_rate": 4.922779922779923e-05,
"loss": 0.9259,
"step": 300
},
{
"epoch": 0.07978381160725775,
"grad_norm": 0.8945180773735046,
"learning_rate": 4.9202059202059204e-05,
"loss": 0.8202,
"step": 310
},
{
"epoch": 0.08235748294942735,
"grad_norm": 1.2765549421310425,
"learning_rate": 4.917631917631918e-05,
"loss": 0.9122,
"step": 320
},
{
"epoch": 0.08493115429159696,
"grad_norm": 0.8597398400306702,
"learning_rate": 4.9150579150579154e-05,
"loss": 0.9114,
"step": 330
},
{
"epoch": 0.08750482563376656,
"grad_norm": 3.5975146293640137,
"learning_rate": 4.9124839124839125e-05,
"loss": 0.8598,
"step": 340
},
{
"epoch": 0.09007849697593617,
"grad_norm": 4.273248672485352,
"learning_rate": 4.90990990990991e-05,
"loss": 0.9325,
"step": 350
},
{
"epoch": 0.09265216831810578,
"grad_norm": 0.9573097229003906,
"learning_rate": 4.9073359073359075e-05,
"loss": 0.992,
"step": 360
},
{
"epoch": 0.09522583966027538,
"grad_norm": 1.2998064756393433,
"learning_rate": 4.904761904761905e-05,
"loss": 0.8984,
"step": 370
},
{
"epoch": 0.097799511002445,
"grad_norm": 1.3857780694961548,
"learning_rate": 4.9021879021879025e-05,
"loss": 1.0102,
"step": 380
},
{
"epoch": 0.10037318234461459,
"grad_norm": 1.2418707609176636,
"learning_rate": 4.8996138996139e-05,
"loss": 0.9023,
"step": 390
},
{
"epoch": 0.1029468536867842,
"grad_norm": 1.2056580781936646,
"learning_rate": 4.8970398970398975e-05,
"loss": 0.8691,
"step": 400
},
{
"epoch": 0.1055205250289538,
"grad_norm": 0.9754079580307007,
"learning_rate": 4.894465894465895e-05,
"loss": 0.9484,
"step": 410
},
{
"epoch": 0.10809419637112341,
"grad_norm": 0.9066883325576782,
"learning_rate": 4.891891891891892e-05,
"loss": 0.9009,
"step": 420
},
{
"epoch": 0.11066786771329301,
"grad_norm": 1.480106234550476,
"learning_rate": 4.88931788931789e-05,
"loss": 0.8764,
"step": 430
},
{
"epoch": 0.11324153905546262,
"grad_norm": 1.1571600437164307,
"learning_rate": 4.886743886743887e-05,
"loss": 0.8711,
"step": 440
},
{
"epoch": 0.11581521039763222,
"grad_norm": 0.9223948121070862,
"learning_rate": 4.884169884169885e-05,
"loss": 0.8466,
"step": 450
},
{
"epoch": 0.11838888173980183,
"grad_norm": 1.7378956079483032,
"learning_rate": 4.881595881595882e-05,
"loss": 0.98,
"step": 460
},
{
"epoch": 0.12096255308197143,
"grad_norm": 1.114259958267212,
"learning_rate": 4.87902187902188e-05,
"loss": 0.9181,
"step": 470
},
{
"epoch": 0.12353622442414104,
"grad_norm": 1.0394957065582275,
"learning_rate": 4.876447876447877e-05,
"loss": 0.9139,
"step": 480
},
{
"epoch": 0.12610989576631065,
"grad_norm": 1.100290298461914,
"learning_rate": 4.873873873873874e-05,
"loss": 0.8707,
"step": 490
},
{
"epoch": 0.12868356710848025,
"grad_norm": 0.9463397264480591,
"learning_rate": 4.871299871299871e-05,
"loss": 0.8608,
"step": 500
},
{
"epoch": 0.13125723845064985,
"grad_norm": 0.9843112230300903,
"learning_rate": 4.868725868725869e-05,
"loss": 0.931,
"step": 510
},
{
"epoch": 0.13383090979281945,
"grad_norm": 1.050361156463623,
"learning_rate": 4.866151866151866e-05,
"loss": 0.9192,
"step": 520
},
{
"epoch": 0.13640458113498907,
"grad_norm": 1.1644479036331177,
"learning_rate": 4.863577863577864e-05,
"loss": 0.8797,
"step": 530
},
{
"epoch": 0.13897825247715867,
"grad_norm": 1.2110307216644287,
"learning_rate": 4.861003861003861e-05,
"loss": 0.9662,
"step": 540
},
{
"epoch": 0.14155192381932827,
"grad_norm": 1.7485480308532715,
"learning_rate": 4.858429858429859e-05,
"loss": 0.9391,
"step": 550
},
{
"epoch": 0.14412559516149787,
"grad_norm": 4.363419532775879,
"learning_rate": 4.855855855855856e-05,
"loss": 0.9487,
"step": 560
},
{
"epoch": 0.1466992665036675,
"grad_norm": 1.028351068496704,
"learning_rate": 4.853281853281853e-05,
"loss": 0.7843,
"step": 570
},
{
"epoch": 0.1492729378458371,
"grad_norm": 1.3750572204589844,
"learning_rate": 4.8507078507078505e-05,
"loss": 0.9074,
"step": 580
},
{
"epoch": 0.1518466091880067,
"grad_norm": 4.166394233703613,
"learning_rate": 4.848133848133848e-05,
"loss": 0.9494,
"step": 590
},
{
"epoch": 0.15442028053017628,
"grad_norm": 1.1192243099212646,
"learning_rate": 4.8455598455598455e-05,
"loss": 0.9937,
"step": 600
},
{
"epoch": 0.1569939518723459,
"grad_norm": 1.243043065071106,
"learning_rate": 4.842985842985843e-05,
"loss": 0.8259,
"step": 610
},
{
"epoch": 0.1595676232145155,
"grad_norm": 1.0789657831192017,
"learning_rate": 4.8404118404118405e-05,
"loss": 0.8466,
"step": 620
},
{
"epoch": 0.1621412945566851,
"grad_norm": 1.2137137651443481,
"learning_rate": 4.837837837837838e-05,
"loss": 0.8358,
"step": 630
},
{
"epoch": 0.1647149658988547,
"grad_norm": 5.4415974617004395,
"learning_rate": 4.8352638352638355e-05,
"loss": 0.9067,
"step": 640
},
{
"epoch": 0.16728863724102433,
"grad_norm": 1.1608994007110596,
"learning_rate": 4.8326898326898327e-05,
"loss": 0.8828,
"step": 650
},
{
"epoch": 0.16986230858319393,
"grad_norm": 1.287034034729004,
"learning_rate": 4.8301158301158305e-05,
"loss": 0.8905,
"step": 660
},
{
"epoch": 0.17243597992536353,
"grad_norm": 1.0879331827163696,
"learning_rate": 4.8275418275418277e-05,
"loss": 0.8821,
"step": 670
},
{
"epoch": 0.17500965126753312,
"grad_norm": 1.1660597324371338,
"learning_rate": 4.8249678249678255e-05,
"loss": 0.8955,
"step": 680
},
{
"epoch": 0.17758332260970275,
"grad_norm": 0.8420181274414062,
"learning_rate": 4.8223938223938227e-05,
"loss": 0.8927,
"step": 690
},
{
"epoch": 0.18015699395187235,
"grad_norm": 0.7901207208633423,
"learning_rate": 4.8198198198198205e-05,
"loss": 0.8926,
"step": 700
},
{
"epoch": 0.18273066529404194,
"grad_norm": 1.2035242319107056,
"learning_rate": 4.8172458172458177e-05,
"loss": 0.8742,
"step": 710
},
{
"epoch": 0.18530433663621157,
"grad_norm": 0.8758509159088135,
"learning_rate": 4.814671814671815e-05,
"loss": 0.9302,
"step": 720
},
{
"epoch": 0.18787800797838117,
"grad_norm": 0.9391271471977234,
"learning_rate": 4.812097812097812e-05,
"loss": 0.8675,
"step": 730
},
{
"epoch": 0.19045167932055077,
"grad_norm": 4.989656448364258,
"learning_rate": 4.80952380952381e-05,
"loss": 0.9314,
"step": 740
},
{
"epoch": 0.19302535066272036,
"grad_norm": 2.094479560852051,
"learning_rate": 4.806949806949807e-05,
"loss": 0.8705,
"step": 750
},
{
"epoch": 0.19559902200489,
"grad_norm": 1.1212397813796997,
"learning_rate": 4.804375804375805e-05,
"loss": 0.9013,
"step": 760
},
{
"epoch": 0.1981726933470596,
"grad_norm": 3.019252300262451,
"learning_rate": 4.801801801801802e-05,
"loss": 0.8546,
"step": 770
},
{
"epoch": 0.20074636468922918,
"grad_norm": 2.4471728801727295,
"learning_rate": 4.7992277992278e-05,
"loss": 0.8738,
"step": 780
},
{
"epoch": 0.20332003603139878,
"grad_norm": 1.226980209350586,
"learning_rate": 4.796653796653797e-05,
"loss": 0.921,
"step": 790
},
{
"epoch": 0.2058937073735684,
"grad_norm": 0.9777143001556396,
"learning_rate": 4.794079794079794e-05,
"loss": 0.893,
"step": 800
},
{
"epoch": 0.208467378715738,
"grad_norm": 1.05388343334198,
"learning_rate": 4.791505791505791e-05,
"loss": 0.8911,
"step": 810
},
{
"epoch": 0.2110410500579076,
"grad_norm": 3.421910285949707,
"learning_rate": 4.788931788931789e-05,
"loss": 0.9105,
"step": 820
},
{
"epoch": 0.2136147214000772,
"grad_norm": 1.2801098823547363,
"learning_rate": 4.786357786357786e-05,
"loss": 0.8692,
"step": 830
},
{
"epoch": 0.21618839274224683,
"grad_norm": 0.8514150381088257,
"learning_rate": 4.783783783783784e-05,
"loss": 0.8643,
"step": 840
},
{
"epoch": 0.21876206408441642,
"grad_norm": 3.815889596939087,
"learning_rate": 4.781209781209782e-05,
"loss": 0.8395,
"step": 850
},
{
"epoch": 0.22133573542658602,
"grad_norm": 1.0590102672576904,
"learning_rate": 4.778635778635779e-05,
"loss": 0.9168,
"step": 860
},
{
"epoch": 0.22390940676875562,
"grad_norm": 0.9641560316085815,
"learning_rate": 4.776061776061776e-05,
"loss": 0.8777,
"step": 870
},
{
"epoch": 0.22648307811092525,
"grad_norm": 1.1582986116409302,
"learning_rate": 4.7734877734877735e-05,
"loss": 0.9626,
"step": 880
},
{
"epoch": 0.22905674945309484,
"grad_norm": 1.3446133136749268,
"learning_rate": 4.770913770913771e-05,
"loss": 0.8254,
"step": 890
},
{
"epoch": 0.23163042079526444,
"grad_norm": 0.9514161944389343,
"learning_rate": 4.7683397683397685e-05,
"loss": 0.9177,
"step": 900
},
{
"epoch": 0.23420409213743404,
"grad_norm": 1.3230582475662231,
"learning_rate": 4.765765765765766e-05,
"loss": 0.9428,
"step": 910
},
{
"epoch": 0.23677776347960366,
"grad_norm": 3.5208494663238525,
"learning_rate": 4.7631917631917635e-05,
"loss": 0.9657,
"step": 920
},
{
"epoch": 0.23935143482177326,
"grad_norm": 1.2036529779434204,
"learning_rate": 4.760617760617761e-05,
"loss": 0.9157,
"step": 930
},
{
"epoch": 0.24192510616394286,
"grad_norm": 0.9241206645965576,
"learning_rate": 4.7580437580437585e-05,
"loss": 0.931,
"step": 940
},
{
"epoch": 0.24449877750611246,
"grad_norm": 0.9766173958778381,
"learning_rate": 4.7554697554697556e-05,
"loss": 0.8657,
"step": 950
},
{
"epoch": 0.24707244884828208,
"grad_norm": 1.0403867959976196,
"learning_rate": 4.752895752895753e-05,
"loss": 1.002,
"step": 960
},
{
"epoch": 0.24964612019045168,
"grad_norm": 1.6326649188995361,
"learning_rate": 4.7503217503217506e-05,
"loss": 0.8585,
"step": 970
},
{
"epoch": 0.2522197915326213,
"grad_norm": 0.8411473035812378,
"learning_rate": 4.747747747747748e-05,
"loss": 0.8956,
"step": 980
},
{
"epoch": 0.2547934628747909,
"grad_norm": 1.342337727546692,
"learning_rate": 4.7451737451737456e-05,
"loss": 0.9417,
"step": 990
},
{
"epoch": 0.2573671342169605,
"grad_norm": 1.0992860794067383,
"learning_rate": 4.742599742599743e-05,
"loss": 0.8915,
"step": 1000
},
{
"epoch": 0.2573671342169605,
"eval_loss": 0.8595815300941467,
"eval_runtime": 395.6739,
"eval_samples_per_second": 49.096,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.0003721938368800249,
"step": 1000
},
{
"epoch": 0.2599408055591301,
"grad_norm": 0.776602566242218,
"learning_rate": 4.7400257400257406e-05,
"loss": 0.8822,
"step": 1010
},
{
"epoch": 0.2625144769012997,
"grad_norm": 1.099664568901062,
"learning_rate": 4.737451737451737e-05,
"loss": 0.8394,
"step": 1020
},
{
"epoch": 0.2650881482434693,
"grad_norm": 0.80992192029953,
"learning_rate": 4.734877734877735e-05,
"loss": 0.8998,
"step": 1030
},
{
"epoch": 0.2676618195856389,
"grad_norm": 0.8013533353805542,
"learning_rate": 4.732303732303733e-05,
"loss": 0.8744,
"step": 1040
},
{
"epoch": 0.2702354909278085,
"grad_norm": 0.8478692770004272,
"learning_rate": 4.72972972972973e-05,
"loss": 0.8574,
"step": 1050
},
{
"epoch": 0.27280916226997814,
"grad_norm": 1.3109067678451538,
"learning_rate": 4.727155727155728e-05,
"loss": 0.914,
"step": 1060
},
{
"epoch": 0.2753828336121477,
"grad_norm": 1.043544888496399,
"learning_rate": 4.724581724581725e-05,
"loss": 0.8326,
"step": 1070
},
{
"epoch": 0.27795650495431734,
"grad_norm": 0.9514428973197937,
"learning_rate": 4.722007722007723e-05,
"loss": 0.9688,
"step": 1080
},
{
"epoch": 0.28053017629648697,
"grad_norm": 2.1324360370635986,
"learning_rate": 4.719433719433719e-05,
"loss": 0.9619,
"step": 1090
},
{
"epoch": 0.28310384763865654,
"grad_norm": 1.0498496294021606,
"learning_rate": 4.716859716859717e-05,
"loss": 0.8134,
"step": 1100
},
{
"epoch": 0.28567751898082616,
"grad_norm": 1.7257237434387207,
"learning_rate": 4.714285714285714e-05,
"loss": 0.8972,
"step": 1110
},
{
"epoch": 0.28825119032299573,
"grad_norm": 3.86918568611145,
"learning_rate": 4.711711711711712e-05,
"loss": 0.9396,
"step": 1120
},
{
"epoch": 0.29082486166516536,
"grad_norm": 0.8664824962615967,
"learning_rate": 4.709137709137709e-05,
"loss": 0.8619,
"step": 1130
},
{
"epoch": 0.293398533007335,
"grad_norm": 1.012129783630371,
"learning_rate": 4.706563706563707e-05,
"loss": 0.858,
"step": 1140
},
{
"epoch": 0.29597220434950455,
"grad_norm": 1.0971994400024414,
"learning_rate": 4.703989703989704e-05,
"loss": 0.8932,
"step": 1150
},
{
"epoch": 0.2985458756916742,
"grad_norm": 1.346057415008545,
"learning_rate": 4.7014157014157014e-05,
"loss": 0.9042,
"step": 1160
},
{
"epoch": 0.3011195470338438,
"grad_norm": 1.1237205266952515,
"learning_rate": 4.6988416988416986e-05,
"loss": 0.7794,
"step": 1170
},
{
"epoch": 0.3036932183760134,
"grad_norm": 0.975764274597168,
"learning_rate": 4.6962676962676964e-05,
"loss": 0.8831,
"step": 1180
},
{
"epoch": 0.306266889718183,
"grad_norm": 1.2752641439437866,
"learning_rate": 4.6936936936936936e-05,
"loss": 0.8896,
"step": 1190
},
{
"epoch": 0.30884056106035257,
"grad_norm": 4.031650543212891,
"learning_rate": 4.6911196911196914e-05,
"loss": 0.9143,
"step": 1200
},
{
"epoch": 0.3114142324025222,
"grad_norm": 1.1821297407150269,
"learning_rate": 4.6885456885456886e-05,
"loss": 0.8443,
"step": 1210
},
{
"epoch": 0.3139879037446918,
"grad_norm": 1.1812553405761719,
"learning_rate": 4.6859716859716864e-05,
"loss": 0.9018,
"step": 1220
},
{
"epoch": 0.3165615750868614,
"grad_norm": 0.8742527365684509,
"learning_rate": 4.6833976833976836e-05,
"loss": 0.7639,
"step": 1230
},
{
"epoch": 0.319135246429031,
"grad_norm": 1.2843698263168335,
"learning_rate": 4.680823680823681e-05,
"loss": 0.8889,
"step": 1240
},
{
"epoch": 0.32170891777120064,
"grad_norm": 0.6974703073501587,
"learning_rate": 4.6782496782496786e-05,
"loss": 0.9007,
"step": 1250
},
{
"epoch": 0.3242825891133702,
"grad_norm": 3.426180839538574,
"learning_rate": 4.675675675675676e-05,
"loss": 0.8939,
"step": 1260
},
{
"epoch": 0.32685626045553984,
"grad_norm": 1.3513213396072388,
"learning_rate": 4.6731016731016736e-05,
"loss": 0.8406,
"step": 1270
},
{
"epoch": 0.3294299317977094,
"grad_norm": 1.0384771823883057,
"learning_rate": 4.670527670527671e-05,
"loss": 0.8806,
"step": 1280
},
{
"epoch": 0.33200360313987903,
"grad_norm": 1.1321779489517212,
"learning_rate": 4.6679536679536686e-05,
"loss": 0.7905,
"step": 1290
},
{
"epoch": 0.33457727448204866,
"grad_norm": 0.8733064532279968,
"learning_rate": 4.665379665379666e-05,
"loss": 0.8215,
"step": 1300
},
{
"epoch": 0.33715094582421823,
"grad_norm": 0.7575940489768982,
"learning_rate": 4.662805662805663e-05,
"loss": 0.8211,
"step": 1310
},
{
"epoch": 0.33972461716638785,
"grad_norm": 0.6808631420135498,
"learning_rate": 4.66023166023166e-05,
"loss": 0.8255,
"step": 1320
},
{
"epoch": 0.3422982885085575,
"grad_norm": 1.1921888589859009,
"learning_rate": 4.657657657657658e-05,
"loss": 0.8856,
"step": 1330
},
{
"epoch": 0.34487195985072705,
"grad_norm": 1.2014636993408203,
"learning_rate": 4.655083655083655e-05,
"loss": 0.8523,
"step": 1340
},
{
"epoch": 0.3474456311928967,
"grad_norm": 1.0800600051879883,
"learning_rate": 4.652509652509653e-05,
"loss": 0.8697,
"step": 1350
},
{
"epoch": 0.35001930253506625,
"grad_norm": 1.1528334617614746,
"learning_rate": 4.64993564993565e-05,
"loss": 0.8628,
"step": 1360
},
{
"epoch": 0.35259297387723587,
"grad_norm": 1.2347609996795654,
"learning_rate": 4.647361647361648e-05,
"loss": 0.8431,
"step": 1370
},
{
"epoch": 0.3551666452194055,
"grad_norm": 1.086850881576538,
"learning_rate": 4.644787644787645e-05,
"loss": 0.8429,
"step": 1380
},
{
"epoch": 0.35774031656157507,
"grad_norm": 1.0318641662597656,
"learning_rate": 4.642213642213642e-05,
"loss": 0.9399,
"step": 1390
},
{
"epoch": 0.3603139879037447,
"grad_norm": 0.9806049466133118,
"learning_rate": 4.6396396396396394e-05,
"loss": 0.8605,
"step": 1400
},
{
"epoch": 0.3628876592459143,
"grad_norm": 1.040371298789978,
"learning_rate": 4.637065637065637e-05,
"loss": 0.8756,
"step": 1410
},
{
"epoch": 0.3654613305880839,
"grad_norm": 3.93825626373291,
"learning_rate": 4.6344916344916344e-05,
"loss": 0.8493,
"step": 1420
},
{
"epoch": 0.3680350019302535,
"grad_norm": 1.530876874923706,
"learning_rate": 4.631917631917632e-05,
"loss": 0.8755,
"step": 1430
},
{
"epoch": 0.37060867327242314,
"grad_norm": 1.0033499002456665,
"learning_rate": 4.6293436293436294e-05,
"loss": 0.8141,
"step": 1440
},
{
"epoch": 0.3731823446145927,
"grad_norm": 1.1045700311660767,
"learning_rate": 4.626769626769627e-05,
"loss": 0.8565,
"step": 1450
},
{
"epoch": 0.37575601595676233,
"grad_norm": 0.9272418022155762,
"learning_rate": 4.6241956241956244e-05,
"loss": 0.8335,
"step": 1460
},
{
"epoch": 0.3783296872989319,
"grad_norm": 2.702624559402466,
"learning_rate": 4.6216216216216215e-05,
"loss": 0.8326,
"step": 1470
},
{
"epoch": 0.38090335864110153,
"grad_norm": 1.0378378629684448,
"learning_rate": 4.6190476190476194e-05,
"loss": 0.8696,
"step": 1480
},
{
"epoch": 0.38347702998327116,
"grad_norm": 0.9696075320243835,
"learning_rate": 4.6164736164736165e-05,
"loss": 0.8559,
"step": 1490
},
{
"epoch": 0.3860507013254407,
"grad_norm": 0.9262058734893799,
"learning_rate": 4.6138996138996144e-05,
"loss": 0.9104,
"step": 1500
},
{
"epoch": 0.38862437266761035,
"grad_norm": 3.3203704357147217,
"learning_rate": 4.6113256113256115e-05,
"loss": 0.803,
"step": 1510
},
{
"epoch": 0.39119804400978,
"grad_norm": 1.3920564651489258,
"learning_rate": 4.6087516087516094e-05,
"loss": 0.9708,
"step": 1520
},
{
"epoch": 0.39377171535194955,
"grad_norm": 0.9011679291725159,
"learning_rate": 4.6061776061776065e-05,
"loss": 0.872,
"step": 1530
},
{
"epoch": 0.3963453866941192,
"grad_norm": 1.1053688526153564,
"learning_rate": 4.603603603603604e-05,
"loss": 0.882,
"step": 1540
},
{
"epoch": 0.39891905803628874,
"grad_norm": 0.8040546774864197,
"learning_rate": 4.601029601029601e-05,
"loss": 0.8539,
"step": 1550
},
{
"epoch": 0.40149272937845837,
"grad_norm": 3.191498041152954,
"learning_rate": 4.598455598455599e-05,
"loss": 0.9703,
"step": 1560
},
{
"epoch": 0.404066400720628,
"grad_norm": 1.2510138750076294,
"learning_rate": 4.595881595881596e-05,
"loss": 0.8436,
"step": 1570
},
{
"epoch": 0.40664007206279756,
"grad_norm": 0.8919886350631714,
"learning_rate": 4.593307593307594e-05,
"loss": 0.8437,
"step": 1580
},
{
"epoch": 0.4092137434049672,
"grad_norm": 0.8489089608192444,
"learning_rate": 4.590733590733591e-05,
"loss": 0.9111,
"step": 1590
},
{
"epoch": 0.4117874147471368,
"grad_norm": 1.2358871698379517,
"learning_rate": 4.588159588159589e-05,
"loss": 0.8806,
"step": 1600
},
{
"epoch": 0.4143610860893064,
"grad_norm": 1.3173274993896484,
"learning_rate": 4.585585585585586e-05,
"loss": 0.833,
"step": 1610
},
{
"epoch": 0.416934757431476,
"grad_norm": 1.3292251825332642,
"learning_rate": 4.583011583011583e-05,
"loss": 0.8926,
"step": 1620
},
{
"epoch": 0.4195084287736456,
"grad_norm": 1.155118465423584,
"learning_rate": 4.58043758043758e-05,
"loss": 0.8289,
"step": 1630
},
{
"epoch": 0.4220821001158152,
"grad_norm": 0.9190046787261963,
"learning_rate": 4.577863577863578e-05,
"loss": 0.8332,
"step": 1640
},
{
"epoch": 0.42465577145798483,
"grad_norm": 0.9945672750473022,
"learning_rate": 4.575289575289575e-05,
"loss": 0.8378,
"step": 1650
},
{
"epoch": 0.4272294428001544,
"grad_norm": 1.0910547971725464,
"learning_rate": 4.572715572715573e-05,
"loss": 0.8843,
"step": 1660
},
{
"epoch": 0.429803114142324,
"grad_norm": 1.24397611618042,
"learning_rate": 4.570141570141571e-05,
"loss": 0.8926,
"step": 1670
},
{
"epoch": 0.43237678548449365,
"grad_norm": 1.0651038885116577,
"learning_rate": 4.567567567567568e-05,
"loss": 0.8542,
"step": 1680
},
{
"epoch": 0.4349504568266632,
"grad_norm": 1.0350909233093262,
"learning_rate": 4.564993564993565e-05,
"loss": 0.8258,
"step": 1690
},
{
"epoch": 0.43752412816883285,
"grad_norm": 1.0753473043441772,
"learning_rate": 4.5624195624195624e-05,
"loss": 0.8557,
"step": 1700
},
{
"epoch": 0.4400977995110024,
"grad_norm": 1.2361218929290771,
"learning_rate": 4.55984555984556e-05,
"loss": 0.9459,
"step": 1710
},
{
"epoch": 0.44267147085317204,
"grad_norm": 1.2962043285369873,
"learning_rate": 4.5572715572715574e-05,
"loss": 0.8822,
"step": 1720
},
{
"epoch": 0.44524514219534167,
"grad_norm": 2.557123899459839,
"learning_rate": 4.554697554697555e-05,
"loss": 0.8018,
"step": 1730
},
{
"epoch": 0.44781881353751124,
"grad_norm": 0.8217394351959229,
"learning_rate": 4.5521235521235524e-05,
"loss": 0.8012,
"step": 1740
},
{
"epoch": 0.45039248487968087,
"grad_norm": 0.9458732008934021,
"learning_rate": 4.54954954954955e-05,
"loss": 0.835,
"step": 1750
},
{
"epoch": 0.4529661562218505,
"grad_norm": 0.9185703992843628,
"learning_rate": 4.5469755469755473e-05,
"loss": 0.8571,
"step": 1760
},
{
"epoch": 0.45553982756402006,
"grad_norm": 1.190473198890686,
"learning_rate": 4.5444015444015445e-05,
"loss": 0.81,
"step": 1770
},
{
"epoch": 0.4581134989061897,
"grad_norm": 1.1734472513198853,
"learning_rate": 4.541827541827542e-05,
"loss": 0.8926,
"step": 1780
},
{
"epoch": 0.46068717024835926,
"grad_norm": 1.0630743503570557,
"learning_rate": 4.5392535392535395e-05,
"loss": 0.8848,
"step": 1790
},
{
"epoch": 0.4632608415905289,
"grad_norm": 3.2704575061798096,
"learning_rate": 4.536679536679537e-05,
"loss": 0.923,
"step": 1800
},
{
"epoch": 0.4658345129326985,
"grad_norm": 0.8586927056312561,
"learning_rate": 4.5341055341055345e-05,
"loss": 0.8056,
"step": 1810
},
{
"epoch": 0.4684081842748681,
"grad_norm": 1.2519656419754028,
"learning_rate": 4.531531531531532e-05,
"loss": 0.8505,
"step": 1820
},
{
"epoch": 0.4709818556170377,
"grad_norm": 0.8403356075286865,
"learning_rate": 4.5289575289575295e-05,
"loss": 0.8093,
"step": 1830
},
{
"epoch": 0.47355552695920733,
"grad_norm": 1.1714375019073486,
"learning_rate": 4.526383526383527e-05,
"loss": 0.9739,
"step": 1840
},
{
"epoch": 0.4761291983013769,
"grad_norm": 0.9783056378364563,
"learning_rate": 4.523809523809524e-05,
"loss": 0.8602,
"step": 1850
},
{
"epoch": 0.4787028696435465,
"grad_norm": 2.869663715362549,
"learning_rate": 4.521235521235521e-05,
"loss": 0.8444,
"step": 1860
},
{
"epoch": 0.48127654098571615,
"grad_norm": 0.8355293273925781,
"learning_rate": 4.518661518661519e-05,
"loss": 0.9036,
"step": 1870
},
{
"epoch": 0.4838502123278857,
"grad_norm": 1.140135407447815,
"learning_rate": 4.516087516087517e-05,
"loss": 0.8022,
"step": 1880
},
{
"epoch": 0.48642388367005535,
"grad_norm": 0.9020267724990845,
"learning_rate": 4.513513513513514e-05,
"loss": 0.8493,
"step": 1890
},
{
"epoch": 0.4889975550122249,
"grad_norm": 3.613842248916626,
"learning_rate": 4.510939510939512e-05,
"loss": 0.8602,
"step": 1900
},
{
"epoch": 0.49157122635439454,
"grad_norm": 3.0518836975097656,
"learning_rate": 4.508365508365509e-05,
"loss": 0.8379,
"step": 1910
},
{
"epoch": 0.49414489769656417,
"grad_norm": 0.9278186559677124,
"learning_rate": 4.505791505791506e-05,
"loss": 0.9156,
"step": 1920
},
{
"epoch": 0.49671856903873374,
"grad_norm": 4.282588481903076,
"learning_rate": 4.503217503217503e-05,
"loss": 0.9301,
"step": 1930
},
{
"epoch": 0.49929224038090336,
"grad_norm": 3.971865653991699,
"learning_rate": 4.500643500643501e-05,
"loss": 0.8885,
"step": 1940
},
{
"epoch": 0.5018659117230729,
"grad_norm": 1.102675199508667,
"learning_rate": 4.498069498069498e-05,
"loss": 0.8936,
"step": 1950
},
{
"epoch": 0.5044395830652426,
"grad_norm": 0.9979203939437866,
"learning_rate": 4.495495495495496e-05,
"loss": 0.8014,
"step": 1960
},
{
"epoch": 0.5070132544074122,
"grad_norm": 4.093165397644043,
"learning_rate": 4.492921492921493e-05,
"loss": 0.8699,
"step": 1970
},
{
"epoch": 0.5095869257495818,
"grad_norm": 1.107493281364441,
"learning_rate": 4.490347490347491e-05,
"loss": 0.8319,
"step": 1980
},
{
"epoch": 0.5121605970917514,
"grad_norm": 0.8577767014503479,
"learning_rate": 4.487773487773488e-05,
"loss": 0.8874,
"step": 1990
},
{
"epoch": 0.514734268433921,
"grad_norm": 1.2690497636795044,
"learning_rate": 4.485199485199485e-05,
"loss": 0.8586,
"step": 2000
},
{
"epoch": 0.514734268433921,
"eval_loss": 0.8306575417518616,
"eval_runtime": 395.638,
"eval_samples_per_second": 49.1,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00039326141255247916,
"step": 2000
},
{
"epoch": 0.5173079397760906,
"grad_norm": 1.1434204578399658,
"learning_rate": 4.4826254826254825e-05,
"loss": 0.9541,
"step": 2010
},
{
"epoch": 0.5198816111182601,
"grad_norm": 3.3350868225097656,
"learning_rate": 4.48005148005148e-05,
"loss": 0.8295,
"step": 2020
},
{
"epoch": 0.5224552824604298,
"grad_norm": 1.2531917095184326,
"learning_rate": 4.4774774774774775e-05,
"loss": 0.8997,
"step": 2030
},
{
"epoch": 0.5250289538025994,
"grad_norm": 0.9255910515785217,
"learning_rate": 4.474903474903475e-05,
"loss": 0.8375,
"step": 2040
},
{
"epoch": 0.527602625144769,
"grad_norm": 1.0736703872680664,
"learning_rate": 4.4723294723294725e-05,
"loss": 0.8293,
"step": 2050
},
{
"epoch": 0.5301762964869386,
"grad_norm": 1.5314749479293823,
"learning_rate": 4.46975546975547e-05,
"loss": 0.8036,
"step": 2060
},
{
"epoch": 0.5327499678291082,
"grad_norm": 1.0924575328826904,
"learning_rate": 4.467181467181467e-05,
"loss": 0.7548,
"step": 2070
},
{
"epoch": 0.5353236391712778,
"grad_norm": 0.9415216445922852,
"learning_rate": 4.4646074646074646e-05,
"loss": 0.8181,
"step": 2080
},
{
"epoch": 0.5378973105134475,
"grad_norm": 1.0533065795898438,
"learning_rate": 4.4620334620334625e-05,
"loss": 0.8045,
"step": 2090
},
{
"epoch": 0.540470981855617,
"grad_norm": 1.1450499296188354,
"learning_rate": 4.4594594594594596e-05,
"loss": 0.8571,
"step": 2100
},
{
"epoch": 0.5430446531977866,
"grad_norm": 3.1443707942962646,
"learning_rate": 4.4568854568854575e-05,
"loss": 0.8687,
"step": 2110
},
{
"epoch": 0.5456183245399563,
"grad_norm": 0.8034738302230835,
"learning_rate": 4.4543114543114546e-05,
"loss": 0.8746,
"step": 2120
},
{
"epoch": 0.5481919958821259,
"grad_norm": 0.8792498707771301,
"learning_rate": 4.4517374517374525e-05,
"loss": 0.8886,
"step": 2130
},
{
"epoch": 0.5507656672242954,
"grad_norm": 3.1286137104034424,
"learning_rate": 4.449163449163449e-05,
"loss": 0.8574,
"step": 2140
},
{
"epoch": 0.5533393385664651,
"grad_norm": 0.9840642213821411,
"learning_rate": 4.446589446589447e-05,
"loss": 0.815,
"step": 2150
},
{
"epoch": 0.5559130099086347,
"grad_norm": 1.1499487161636353,
"learning_rate": 4.444015444015444e-05,
"loss": 0.8691,
"step": 2160
},
{
"epoch": 0.5584866812508043,
"grad_norm": 0.8926144242286682,
"learning_rate": 4.441441441441442e-05,
"loss": 0.8082,
"step": 2170
},
{
"epoch": 0.5610603525929739,
"grad_norm": 0.7535387873649597,
"learning_rate": 4.438867438867439e-05,
"loss": 0.9033,
"step": 2180
},
{
"epoch": 0.5636340239351435,
"grad_norm": 1.196241021156311,
"learning_rate": 4.436293436293437e-05,
"loss": 0.9039,
"step": 2190
},
{
"epoch": 0.5662076952773131,
"grad_norm": 2.5870370864868164,
"learning_rate": 4.433719433719434e-05,
"loss": 0.8692,
"step": 2200
},
{
"epoch": 0.5687813666194826,
"grad_norm": 1.209121823310852,
"learning_rate": 4.431145431145432e-05,
"loss": 0.8467,
"step": 2210
},
{
"epoch": 0.5713550379616523,
"grad_norm": 0.988710343837738,
"learning_rate": 4.428571428571428e-05,
"loss": 0.7887,
"step": 2220
},
{
"epoch": 0.5739287093038219,
"grad_norm": 0.9768078327178955,
"learning_rate": 4.425997425997426e-05,
"loss": 0.8597,
"step": 2230
},
{
"epoch": 0.5765023806459915,
"grad_norm": 1.3332276344299316,
"learning_rate": 4.423423423423423e-05,
"loss": 0.9351,
"step": 2240
},
{
"epoch": 0.5790760519881611,
"grad_norm": 2.8126509189605713,
"learning_rate": 4.420849420849421e-05,
"loss": 0.8706,
"step": 2250
},
{
"epoch": 0.5816497233303307,
"grad_norm": 1.1589819192886353,
"learning_rate": 4.418275418275418e-05,
"loss": 0.874,
"step": 2260
},
{
"epoch": 0.5842233946725003,
"grad_norm": 1.085998296737671,
"learning_rate": 4.415701415701416e-05,
"loss": 0.8321,
"step": 2270
},
{
"epoch": 0.58679706601467,
"grad_norm": 0.8814729452133179,
"learning_rate": 4.413127413127413e-05,
"loss": 0.8407,
"step": 2280
},
{
"epoch": 0.5893707373568395,
"grad_norm": 3.6216423511505127,
"learning_rate": 4.4105534105534104e-05,
"loss": 0.7986,
"step": 2290
},
{
"epoch": 0.5919444086990091,
"grad_norm": 3.6593005657196045,
"learning_rate": 4.407979407979408e-05,
"loss": 0.8252,
"step": 2300
},
{
"epoch": 0.5945180800411788,
"grad_norm": 0.9884054660797119,
"learning_rate": 4.4054054054054054e-05,
"loss": 0.8606,
"step": 2310
},
{
"epoch": 0.5970917513833484,
"grad_norm": 1.0792969465255737,
"learning_rate": 4.402831402831403e-05,
"loss": 0.7866,
"step": 2320
},
{
"epoch": 0.5996654227255179,
"grad_norm": 1.0255166292190552,
"learning_rate": 4.4002574002574004e-05,
"loss": 0.7659,
"step": 2330
},
{
"epoch": 0.6022390940676876,
"grad_norm": 0.7235389947891235,
"learning_rate": 4.397683397683398e-05,
"loss": 0.8475,
"step": 2340
},
{
"epoch": 0.6048127654098572,
"grad_norm": 0.8555417060852051,
"learning_rate": 4.3951093951093954e-05,
"loss": 0.8234,
"step": 2350
},
{
"epoch": 0.6073864367520267,
"grad_norm": 1.1346522569656372,
"learning_rate": 4.3925353925353926e-05,
"loss": 0.8572,
"step": 2360
},
{
"epoch": 0.6099601080941963,
"grad_norm": 3.7354023456573486,
"learning_rate": 4.38996138996139e-05,
"loss": 0.842,
"step": 2370
},
{
"epoch": 0.612533779436366,
"grad_norm": 1.0269885063171387,
"learning_rate": 4.3873873873873876e-05,
"loss": 0.8483,
"step": 2380
},
{
"epoch": 0.6151074507785356,
"grad_norm": 4.06434965133667,
"learning_rate": 4.384813384813385e-05,
"loss": 0.9082,
"step": 2390
},
{
"epoch": 0.6176811221207051,
"grad_norm": 1.0140035152435303,
"learning_rate": 4.3822393822393826e-05,
"loss": 0.8993,
"step": 2400
},
{
"epoch": 0.6202547934628748,
"grad_norm": 0.9860438108444214,
"learning_rate": 4.37966537966538e-05,
"loss": 0.8143,
"step": 2410
},
{
"epoch": 0.6228284648050444,
"grad_norm": 0.9462414979934692,
"learning_rate": 4.3770913770913776e-05,
"loss": 0.8752,
"step": 2420
},
{
"epoch": 0.625402136147214,
"grad_norm": 0.889310896396637,
"learning_rate": 4.374517374517375e-05,
"loss": 0.8576,
"step": 2430
},
{
"epoch": 0.6279758074893836,
"grad_norm": 1.0256364345550537,
"learning_rate": 4.371943371943372e-05,
"loss": 0.8331,
"step": 2440
},
{
"epoch": 0.6305494788315532,
"grad_norm": 1.242521047592163,
"learning_rate": 4.369369369369369e-05,
"loss": 0.917,
"step": 2450
},
{
"epoch": 0.6331231501737228,
"grad_norm": 0.8450289368629456,
"learning_rate": 4.366795366795367e-05,
"loss": 0.8463,
"step": 2460
},
{
"epoch": 0.6356968215158925,
"grad_norm": 2.8590588569641113,
"learning_rate": 4.364221364221364e-05,
"loss": 0.8509,
"step": 2470
},
{
"epoch": 0.638270492858062,
"grad_norm": 0.9202858805656433,
"learning_rate": 4.361647361647362e-05,
"loss": 0.8259,
"step": 2480
},
{
"epoch": 0.6408441642002316,
"grad_norm": 1.0481261014938354,
"learning_rate": 4.359073359073359e-05,
"loss": 0.8764,
"step": 2490
},
{
"epoch": 0.6434178355424013,
"grad_norm": 1.0081082582473755,
"learning_rate": 4.356499356499357e-05,
"loss": 0.8097,
"step": 2500
},
{
"epoch": 0.6459915068845709,
"grad_norm": 0.9049886465072632,
"learning_rate": 4.353925353925354e-05,
"loss": 0.8617,
"step": 2510
},
{
"epoch": 0.6485651782267404,
"grad_norm": 1.0273019075393677,
"learning_rate": 4.351351351351351e-05,
"loss": 0.8457,
"step": 2520
},
{
"epoch": 0.6511388495689101,
"grad_norm": 3.1183676719665527,
"learning_rate": 4.348777348777349e-05,
"loss": 0.8202,
"step": 2530
},
{
"epoch": 0.6537125209110797,
"grad_norm": 0.9991865158081055,
"learning_rate": 4.346203346203346e-05,
"loss": 0.8147,
"step": 2540
},
{
"epoch": 0.6562861922532492,
"grad_norm": 1.0085402727127075,
"learning_rate": 4.343629343629344e-05,
"loss": 0.8073,
"step": 2550
},
{
"epoch": 0.6588598635954188,
"grad_norm": 0.8995606899261475,
"learning_rate": 4.341055341055341e-05,
"loss": 0.8394,
"step": 2560
},
{
"epoch": 0.6614335349375885,
"grad_norm": 0.9390326142311096,
"learning_rate": 4.338481338481339e-05,
"loss": 0.8784,
"step": 2570
},
{
"epoch": 0.6640072062797581,
"grad_norm": 0.7828226089477539,
"learning_rate": 4.335907335907336e-05,
"loss": 0.861,
"step": 2580
},
{
"epoch": 0.6665808776219276,
"grad_norm": 0.752167284488678,
"learning_rate": 4.3333333333333334e-05,
"loss": 0.8099,
"step": 2590
},
{
"epoch": 0.6691545489640973,
"grad_norm": 0.9056263566017151,
"learning_rate": 4.3307593307593306e-05,
"loss": 0.9575,
"step": 2600
},
{
"epoch": 0.6717282203062669,
"grad_norm": 1.1399377584457397,
"learning_rate": 4.3281853281853284e-05,
"loss": 0.8491,
"step": 2610
},
{
"epoch": 0.6743018916484365,
"grad_norm": 1.1799904108047485,
"learning_rate": 4.3256113256113256e-05,
"loss": 0.8375,
"step": 2620
},
{
"epoch": 0.6768755629906061,
"grad_norm": 3.550177812576294,
"learning_rate": 4.3230373230373234e-05,
"loss": 0.8797,
"step": 2630
},
{
"epoch": 0.6794492343327757,
"grad_norm": 0.8482165336608887,
"learning_rate": 4.3204633204633206e-05,
"loss": 0.8424,
"step": 2640
},
{
"epoch": 0.6820229056749453,
"grad_norm": 0.8729701042175293,
"learning_rate": 4.3178893178893184e-05,
"loss": 0.7896,
"step": 2650
},
{
"epoch": 0.684596577017115,
"grad_norm": 1.0203555822372437,
"learning_rate": 4.3153153153153156e-05,
"loss": 0.8836,
"step": 2660
},
{
"epoch": 0.6871702483592845,
"grad_norm": 1.1605058908462524,
"learning_rate": 4.312741312741313e-05,
"loss": 0.7561,
"step": 2670
},
{
"epoch": 0.6897439197014541,
"grad_norm": 1.3112170696258545,
"learning_rate": 4.31016731016731e-05,
"loss": 0.8504,
"step": 2680
},
{
"epoch": 0.6923175910436238,
"grad_norm": 2.7593069076538086,
"learning_rate": 4.307593307593308e-05,
"loss": 0.9337,
"step": 2690
},
{
"epoch": 0.6948912623857934,
"grad_norm": 0.7685155272483826,
"learning_rate": 4.305019305019305e-05,
"loss": 0.9312,
"step": 2700
},
{
"epoch": 0.6974649337279629,
"grad_norm": 0.9489961862564087,
"learning_rate": 4.302445302445303e-05,
"loss": 0.839,
"step": 2710
},
{
"epoch": 0.7000386050701325,
"grad_norm": 2.002380132675171,
"learning_rate": 4.2998712998713006e-05,
"loss": 0.7451,
"step": 2720
},
{
"epoch": 0.7026122764123022,
"grad_norm": 1.3577697277069092,
"learning_rate": 4.297297297297298e-05,
"loss": 0.823,
"step": 2730
},
{
"epoch": 0.7051859477544717,
"grad_norm": 0.991814911365509,
"learning_rate": 4.294723294723295e-05,
"loss": 0.7832,
"step": 2740
},
{
"epoch": 0.7077596190966413,
"grad_norm": 1.302040696144104,
"learning_rate": 4.292149292149292e-05,
"loss": 0.8151,
"step": 2750
},
{
"epoch": 0.710333290438811,
"grad_norm": 1.1003100872039795,
"learning_rate": 4.28957528957529e-05,
"loss": 0.8792,
"step": 2760
},
{
"epoch": 0.7129069617809806,
"grad_norm": 1.0496855974197388,
"learning_rate": 4.287001287001287e-05,
"loss": 0.8354,
"step": 2770
},
{
"epoch": 0.7154806331231501,
"grad_norm": 1.375705599784851,
"learning_rate": 4.284427284427285e-05,
"loss": 0.7915,
"step": 2780
},
{
"epoch": 0.7180543044653198,
"grad_norm": 1.030092716217041,
"learning_rate": 4.281853281853282e-05,
"loss": 0.828,
"step": 2790
},
{
"epoch": 0.7206279758074894,
"grad_norm": 2.520185708999634,
"learning_rate": 4.27927927927928e-05,
"loss": 0.8099,
"step": 2800
},
{
"epoch": 0.723201647149659,
"grad_norm": 0.7944628596305847,
"learning_rate": 4.276705276705277e-05,
"loss": 0.8974,
"step": 2810
},
{
"epoch": 0.7257753184918286,
"grad_norm": 1.2783660888671875,
"learning_rate": 4.274131274131274e-05,
"loss": 0.7787,
"step": 2820
},
{
"epoch": 0.7283489898339982,
"grad_norm": 1.3568511009216309,
"learning_rate": 4.2715572715572714e-05,
"loss": 0.8208,
"step": 2830
},
{
"epoch": 0.7309226611761678,
"grad_norm": 2.9468472003936768,
"learning_rate": 4.268983268983269e-05,
"loss": 0.8063,
"step": 2840
},
{
"epoch": 0.7334963325183375,
"grad_norm": 0.871306300163269,
"learning_rate": 4.2664092664092664e-05,
"loss": 0.7988,
"step": 2850
},
{
"epoch": 0.736070003860507,
"grad_norm": 1.0941967964172363,
"learning_rate": 4.263835263835264e-05,
"loss": 0.8288,
"step": 2860
},
{
"epoch": 0.7386436752026766,
"grad_norm": 0.992978036403656,
"learning_rate": 4.2612612612612614e-05,
"loss": 0.8742,
"step": 2870
},
{
"epoch": 0.7412173465448463,
"grad_norm": 0.8720837831497192,
"learning_rate": 4.258687258687259e-05,
"loss": 0.8032,
"step": 2880
},
{
"epoch": 0.7437910178870158,
"grad_norm": 3.578805446624756,
"learning_rate": 4.2561132561132564e-05,
"loss": 0.8462,
"step": 2890
},
{
"epoch": 0.7463646892291854,
"grad_norm": 1.0471611022949219,
"learning_rate": 4.2535392535392535e-05,
"loss": 0.8461,
"step": 2900
},
{
"epoch": 0.748938360571355,
"grad_norm": 0.9668796062469482,
"learning_rate": 4.2509652509652514e-05,
"loss": 0.8373,
"step": 2910
},
{
"epoch": 0.7515120319135247,
"grad_norm": 0.8879560232162476,
"learning_rate": 4.2483912483912485e-05,
"loss": 0.8707,
"step": 2920
},
{
"epoch": 0.7540857032556942,
"grad_norm": 0.8577817678451538,
"learning_rate": 4.2458172458172464e-05,
"loss": 0.8934,
"step": 2930
},
{
"epoch": 0.7566593745978638,
"grad_norm": 2.37251877784729,
"learning_rate": 4.2432432432432435e-05,
"loss": 0.8708,
"step": 2940
},
{
"epoch": 0.7592330459400335,
"grad_norm": 0.7796174883842468,
"learning_rate": 4.2406692406692414e-05,
"loss": 0.8832,
"step": 2950
},
{
"epoch": 0.7618067172822031,
"grad_norm": 1.032285213470459,
"learning_rate": 4.2380952380952385e-05,
"loss": 0.8615,
"step": 2960
},
{
"epoch": 0.7643803886243726,
"grad_norm": 4.347442150115967,
"learning_rate": 4.235521235521236e-05,
"loss": 0.8413,
"step": 2970
},
{
"epoch": 0.7669540599665423,
"grad_norm": 1.1776065826416016,
"learning_rate": 4.232947232947233e-05,
"loss": 0.8533,
"step": 2980
},
{
"epoch": 0.7695277313087119,
"grad_norm": 0.956089973449707,
"learning_rate": 4.230373230373231e-05,
"loss": 0.8051,
"step": 2990
},
{
"epoch": 0.7721014026508815,
"grad_norm": 2.812290906906128,
"learning_rate": 4.227799227799228e-05,
"loss": 0.816,
"step": 3000
},
{
"epoch": 0.7721014026508815,
"eval_loss": 0.8144821524620056,
"eval_runtime": 395.614,
"eval_samples_per_second": 49.103,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00037428161464936723,
"step": 3000
},
{
"epoch": 0.7746750739930511,
"grad_norm": 0.971863865852356,
"learning_rate": 4.225225225225226e-05,
"loss": 0.8316,
"step": 3010
},
{
"epoch": 0.7772487453352207,
"grad_norm": 0.9860455393791199,
"learning_rate": 4.222651222651223e-05,
"loss": 0.8641,
"step": 3020
},
{
"epoch": 0.7798224166773903,
"grad_norm": 1.0967594385147095,
"learning_rate": 4.220077220077221e-05,
"loss": 0.8423,
"step": 3030
},
{
"epoch": 0.78239608801956,
"grad_norm": 0.7346376776695251,
"learning_rate": 4.217503217503218e-05,
"loss": 0.8773,
"step": 3040
},
{
"epoch": 0.7849697593617295,
"grad_norm": 0.7431369423866272,
"learning_rate": 4.214929214929215e-05,
"loss": 0.8448,
"step": 3050
},
{
"epoch": 0.7875434307038991,
"grad_norm": 1.0294193029403687,
"learning_rate": 4.212355212355212e-05,
"loss": 0.7999,
"step": 3060
},
{
"epoch": 0.7901171020460687,
"grad_norm": 3.059105157852173,
"learning_rate": 4.20978120978121e-05,
"loss": 0.8155,
"step": 3070
},
{
"epoch": 0.7926907733882383,
"grad_norm": 1.1290513277053833,
"learning_rate": 4.207207207207207e-05,
"loss": 0.9172,
"step": 3080
},
{
"epoch": 0.7952644447304079,
"grad_norm": 0.7203499674797058,
"learning_rate": 4.204633204633205e-05,
"loss": 0.8059,
"step": 3090
},
{
"epoch": 0.7978381160725775,
"grad_norm": 3.0688018798828125,
"learning_rate": 4.202059202059202e-05,
"loss": 0.9509,
"step": 3100
},
{
"epoch": 0.8004117874147472,
"grad_norm": 4.001659393310547,
"learning_rate": 4.1994851994852e-05,
"loss": 0.7508,
"step": 3110
},
{
"epoch": 0.8029854587569167,
"grad_norm": 1.1222361326217651,
"learning_rate": 4.196911196911197e-05,
"loss": 0.9271,
"step": 3120
},
{
"epoch": 0.8055591300990863,
"grad_norm": 0.8683832287788391,
"learning_rate": 4.194337194337194e-05,
"loss": 0.8042,
"step": 3130
},
{
"epoch": 0.808132801441256,
"grad_norm": 0.89637291431427,
"learning_rate": 4.191763191763192e-05,
"loss": 0.7162,
"step": 3140
},
{
"epoch": 0.8107064727834256,
"grad_norm": 0.8857401609420776,
"learning_rate": 4.189189189189189e-05,
"loss": 0.7789,
"step": 3150
},
{
"epoch": 0.8132801441255951,
"grad_norm": 1.2026958465576172,
"learning_rate": 4.186615186615187e-05,
"loss": 0.8782,
"step": 3160
},
{
"epoch": 0.8158538154677648,
"grad_norm": 0.8439406156539917,
"learning_rate": 4.184041184041184e-05,
"loss": 0.8504,
"step": 3170
},
{
"epoch": 0.8184274868099344,
"grad_norm": 0.9551829099655151,
"learning_rate": 4.181467181467182e-05,
"loss": 0.8152,
"step": 3180
},
{
"epoch": 0.821001158152104,
"grad_norm": 1.0437098741531372,
"learning_rate": 4.1788931788931787e-05,
"loss": 0.8451,
"step": 3190
},
{
"epoch": 0.8235748294942736,
"grad_norm": 0.9579282402992249,
"learning_rate": 4.1763191763191765e-05,
"loss": 0.8714,
"step": 3200
},
{
"epoch": 0.8261485008364432,
"grad_norm": 3.2837677001953125,
"learning_rate": 4.1737451737451737e-05,
"loss": 0.9142,
"step": 3210
},
{
"epoch": 0.8287221721786128,
"grad_norm": 0.8237940073013306,
"learning_rate": 4.1711711711711715e-05,
"loss": 0.8592,
"step": 3220
},
{
"epoch": 0.8312958435207825,
"grad_norm": 0.9125580787658691,
"learning_rate": 4.1685971685971687e-05,
"loss": 0.8598,
"step": 3230
},
{
"epoch": 0.833869514862952,
"grad_norm": 3.1465914249420166,
"learning_rate": 4.1660231660231665e-05,
"loss": 0.756,
"step": 3240
},
{
"epoch": 0.8364431862051216,
"grad_norm": 0.9401727318763733,
"learning_rate": 4.1634491634491637e-05,
"loss": 0.765,
"step": 3250
},
{
"epoch": 0.8390168575472912,
"grad_norm": 1.0945508480072021,
"learning_rate": 4.1608751608751615e-05,
"loss": 0.8335,
"step": 3260
},
{
"epoch": 0.8415905288894608,
"grad_norm": 1.5504162311553955,
"learning_rate": 4.158301158301158e-05,
"loss": 0.8269,
"step": 3270
},
{
"epoch": 0.8441642002316304,
"grad_norm": 0.7810274362564087,
"learning_rate": 4.155727155727156e-05,
"loss": 0.8326,
"step": 3280
},
{
"epoch": 0.8467378715738,
"grad_norm": 1.2022150754928589,
"learning_rate": 4.153153153153153e-05,
"loss": 0.7965,
"step": 3290
},
{
"epoch": 0.8493115429159697,
"grad_norm": 2.462810754776001,
"learning_rate": 4.150579150579151e-05,
"loss": 0.8781,
"step": 3300
},
{
"epoch": 0.8518852142581392,
"grad_norm": 1.1842944622039795,
"learning_rate": 4.148005148005148e-05,
"loss": 0.7906,
"step": 3310
},
{
"epoch": 0.8544588856003088,
"grad_norm": 0.8692466020584106,
"learning_rate": 4.145431145431146e-05,
"loss": 0.8167,
"step": 3320
},
{
"epoch": 0.8570325569424785,
"grad_norm": 1.1452484130859375,
"learning_rate": 4.1428571428571437e-05,
"loss": 0.795,
"step": 3330
},
{
"epoch": 0.859606228284648,
"grad_norm": 1.0664327144622803,
"learning_rate": 4.14028314028314e-05,
"loss": 0.8176,
"step": 3340
},
{
"epoch": 0.8621798996268176,
"grad_norm": 1.0237557888031006,
"learning_rate": 4.137709137709138e-05,
"loss": 0.8243,
"step": 3350
},
{
"epoch": 0.8647535709689873,
"grad_norm": 1.301072597503662,
"learning_rate": 4.135135135135135e-05,
"loss": 0.7853,
"step": 3360
},
{
"epoch": 0.8673272423111569,
"grad_norm": 1.061145544052124,
"learning_rate": 4.132561132561133e-05,
"loss": 0.8669,
"step": 3370
},
{
"epoch": 0.8699009136533264,
"grad_norm": 4.076564788818359,
"learning_rate": 4.12998712998713e-05,
"loss": 0.8657,
"step": 3380
},
{
"epoch": 0.8724745849954961,
"grad_norm": 0.8901606798171997,
"learning_rate": 4.127413127413128e-05,
"loss": 0.8867,
"step": 3390
},
{
"epoch": 0.8750482563376657,
"grad_norm": 1.0653889179229736,
"learning_rate": 4.124839124839125e-05,
"loss": 0.8954,
"step": 3400
},
{
"epoch": 0.8776219276798353,
"grad_norm": 0.9028094410896301,
"learning_rate": 4.122265122265122e-05,
"loss": 0.8455,
"step": 3410
},
{
"epoch": 0.8801955990220048,
"grad_norm": 0.7900987267494202,
"learning_rate": 4.1196911196911195e-05,
"loss": 0.8002,
"step": 3420
},
{
"epoch": 0.8827692703641745,
"grad_norm": 3.244790554046631,
"learning_rate": 4.117117117117117e-05,
"loss": 0.7287,
"step": 3430
},
{
"epoch": 0.8853429417063441,
"grad_norm": 1.0797080993652344,
"learning_rate": 4.1145431145431145e-05,
"loss": 0.8955,
"step": 3440
},
{
"epoch": 0.8879166130485137,
"grad_norm": 1.0693559646606445,
"learning_rate": 4.111969111969112e-05,
"loss": 0.8617,
"step": 3450
},
{
"epoch": 0.8904902843906833,
"grad_norm": 0.8851631283760071,
"learning_rate": 4.1093951093951095e-05,
"loss": 0.8405,
"step": 3460
},
{
"epoch": 0.8930639557328529,
"grad_norm": 2.6843698024749756,
"learning_rate": 4.106821106821107e-05,
"loss": 0.8352,
"step": 3470
},
{
"epoch": 0.8956376270750225,
"grad_norm": 3.100241184234619,
"learning_rate": 4.1042471042471045e-05,
"loss": 0.7919,
"step": 3480
},
{
"epoch": 0.8982112984171922,
"grad_norm": 0.8943722248077393,
"learning_rate": 4.1016731016731016e-05,
"loss": 0.8122,
"step": 3490
},
{
"epoch": 0.9007849697593617,
"grad_norm": 0.827441394329071,
"learning_rate": 4.099099099099099e-05,
"loss": 0.8883,
"step": 3500
},
{
"epoch": 0.9033586411015313,
"grad_norm": 1.1157348155975342,
"learning_rate": 4.0965250965250966e-05,
"loss": 0.8779,
"step": 3510
},
{
"epoch": 0.905932312443701,
"grad_norm": 1.1023356914520264,
"learning_rate": 4.093951093951094e-05,
"loss": 0.8149,
"step": 3520
},
{
"epoch": 0.9085059837858706,
"grad_norm": 0.661834180355072,
"learning_rate": 4.0913770913770916e-05,
"loss": 0.7732,
"step": 3530
},
{
"epoch": 0.9110796551280401,
"grad_norm": 1.1258097887039185,
"learning_rate": 4.0888030888030895e-05,
"loss": 0.7748,
"step": 3540
},
{
"epoch": 0.9136533264702098,
"grad_norm": 1.0704106092453003,
"learning_rate": 4.0862290862290866e-05,
"loss": 0.8873,
"step": 3550
},
{
"epoch": 0.9162269978123794,
"grad_norm": 0.9617460370063782,
"learning_rate": 4.083655083655084e-05,
"loss": 0.7,
"step": 3560
},
{
"epoch": 0.9188006691545489,
"grad_norm": 0.8261712789535522,
"learning_rate": 4.081081081081081e-05,
"loss": 0.7724,
"step": 3570
},
{
"epoch": 0.9213743404967185,
"grad_norm": 0.8264958262443542,
"learning_rate": 4.078507078507079e-05,
"loss": 0.8189,
"step": 3580
},
{
"epoch": 0.9239480118388882,
"grad_norm": 0.9358187913894653,
"learning_rate": 4.075933075933076e-05,
"loss": 0.823,
"step": 3590
},
{
"epoch": 0.9265216831810578,
"grad_norm": 0.9269980788230896,
"learning_rate": 4.073359073359074e-05,
"loss": 0.7901,
"step": 3600
},
{
"epoch": 0.9290953545232273,
"grad_norm": 1.0300779342651367,
"learning_rate": 4.070785070785071e-05,
"loss": 0.8129,
"step": 3610
},
{
"epoch": 0.931669025865397,
"grad_norm": 2.253840446472168,
"learning_rate": 4.068211068211069e-05,
"loss": 0.785,
"step": 3620
},
{
"epoch": 0.9342426972075666,
"grad_norm": 0.7381809949874878,
"learning_rate": 4.065637065637066e-05,
"loss": 0.8316,
"step": 3630
},
{
"epoch": 0.9368163685497362,
"grad_norm": 2.6727185249328613,
"learning_rate": 4.063063063063063e-05,
"loss": 0.88,
"step": 3640
},
{
"epoch": 0.9393900398919058,
"grad_norm": 0.7248988747596741,
"learning_rate": 4.06048906048906e-05,
"loss": 0.8239,
"step": 3650
},
{
"epoch": 0.9419637112340754,
"grad_norm": 0.8684216141700745,
"learning_rate": 4.057915057915058e-05,
"loss": 0.8899,
"step": 3660
},
{
"epoch": 0.944537382576245,
"grad_norm": 2.2277228832244873,
"learning_rate": 4.055341055341055e-05,
"loss": 0.8437,
"step": 3670
},
{
"epoch": 0.9471110539184147,
"grad_norm": 0.946140706539154,
"learning_rate": 4.052767052767053e-05,
"loss": 0.7992,
"step": 3680
},
{
"epoch": 0.9496847252605842,
"grad_norm": 0.7557298541069031,
"learning_rate": 4.05019305019305e-05,
"loss": 0.849,
"step": 3690
},
{
"epoch": 0.9522583966027538,
"grad_norm": 1.0458942651748657,
"learning_rate": 4.047619047619048e-05,
"loss": 0.8492,
"step": 3700
},
{
"epoch": 0.9548320679449235,
"grad_norm": 1.1995244026184082,
"learning_rate": 4.045045045045045e-05,
"loss": 0.7823,
"step": 3710
},
{
"epoch": 0.957405739287093,
"grad_norm": 3.7634544372558594,
"learning_rate": 4.0424710424710424e-05,
"loss": 0.7751,
"step": 3720
},
{
"epoch": 0.9599794106292626,
"grad_norm": 1.2692762613296509,
"learning_rate": 4.0398970398970396e-05,
"loss": 0.8069,
"step": 3730
},
{
"epoch": 0.9625530819714323,
"grad_norm": 1.2810945510864258,
"learning_rate": 4.0373230373230374e-05,
"loss": 0.9275,
"step": 3740
},
{
"epoch": 0.9651267533136019,
"grad_norm": 1.7654497623443604,
"learning_rate": 4.034749034749035e-05,
"loss": 0.8789,
"step": 3750
},
{
"epoch": 0.9677004246557714,
"grad_norm": 3.197882652282715,
"learning_rate": 4.0321750321750324e-05,
"loss": 0.8377,
"step": 3760
},
{
"epoch": 0.970274095997941,
"grad_norm": 0.8221775889396667,
"learning_rate": 4.02960102960103e-05,
"loss": 0.7834,
"step": 3770
},
{
"epoch": 0.9728477673401107,
"grad_norm": 0.9172367453575134,
"learning_rate": 4.0270270270270274e-05,
"loss": 0.8551,
"step": 3780
},
{
"epoch": 0.9754214386822803,
"grad_norm": 0.8287565112113953,
"learning_rate": 4.0244530244530246e-05,
"loss": 0.8427,
"step": 3790
},
{
"epoch": 0.9779951100244498,
"grad_norm": 0.9844699501991272,
"learning_rate": 4.021879021879022e-05,
"loss": 0.8414,
"step": 3800
},
{
"epoch": 0.9805687813666195,
"grad_norm": 0.8020986318588257,
"learning_rate": 4.0193050193050196e-05,
"loss": 0.9036,
"step": 3810
},
{
"epoch": 0.9831424527087891,
"grad_norm": 0.9322710633277893,
"learning_rate": 4.016731016731017e-05,
"loss": 0.773,
"step": 3820
},
{
"epoch": 0.9857161240509587,
"grad_norm": 1.5379607677459717,
"learning_rate": 4.0141570141570146e-05,
"loss": 0.8547,
"step": 3830
},
{
"epoch": 0.9882897953931283,
"grad_norm": 3.0354602336883545,
"learning_rate": 4.011583011583012e-05,
"loss": 0.8487,
"step": 3840
},
{
"epoch": 0.9908634667352979,
"grad_norm": 0.9367321133613586,
"learning_rate": 4.0090090090090096e-05,
"loss": 0.8564,
"step": 3850
},
{
"epoch": 0.9934371380774675,
"grad_norm": 0.9189753532409668,
"learning_rate": 4.006435006435007e-05,
"loss": 0.8674,
"step": 3860
},
{
"epoch": 0.9960108094196372,
"grad_norm": 1.0535331964492798,
"learning_rate": 4.003861003861004e-05,
"loss": 0.8038,
"step": 3870
},
{
"epoch": 0.9985844807618067,
"grad_norm": 0.8121357560157776,
"learning_rate": 4.001287001287001e-05,
"loss": 0.8706,
"step": 3880
},
{
"epoch": 1.0011581521039763,
"grad_norm": 1.0434505939483643,
"learning_rate": 3.998712998712999e-05,
"loss": 0.7571,
"step": 3890
},
{
"epoch": 1.0037318234461459,
"grad_norm": 0.9326075315475464,
"learning_rate": 3.996138996138996e-05,
"loss": 0.736,
"step": 3900
},
{
"epoch": 1.0063054947883154,
"grad_norm": 0.7963811755180359,
"learning_rate": 3.993564993564994e-05,
"loss": 0.6516,
"step": 3910
},
{
"epoch": 1.0088791661304852,
"grad_norm": 3.7952184677124023,
"learning_rate": 3.990990990990991e-05,
"loss": 0.7163,
"step": 3920
},
{
"epoch": 1.0114528374726548,
"grad_norm": 3.2275214195251465,
"learning_rate": 3.988416988416989e-05,
"loss": 0.7229,
"step": 3930
},
{
"epoch": 1.0140265088148244,
"grad_norm": 0.9668965339660645,
"learning_rate": 3.985842985842986e-05,
"loss": 0.6849,
"step": 3940
},
{
"epoch": 1.016600180156994,
"grad_norm": 1.117182731628418,
"learning_rate": 3.983268983268983e-05,
"loss": 0.7061,
"step": 3950
},
{
"epoch": 1.0191738514991635,
"grad_norm": 2.518076181411743,
"learning_rate": 3.980694980694981e-05,
"loss": 0.7505,
"step": 3960
},
{
"epoch": 1.021747522841333,
"grad_norm": 1.0619583129882812,
"learning_rate": 3.978120978120978e-05,
"loss": 0.7439,
"step": 3970
},
{
"epoch": 1.0243211941835029,
"grad_norm": 0.724909245967865,
"learning_rate": 3.975546975546976e-05,
"loss": 0.6356,
"step": 3980
},
{
"epoch": 1.0268948655256724,
"grad_norm": 0.8852310180664062,
"learning_rate": 3.972972972972973e-05,
"loss": 0.6923,
"step": 3990
},
{
"epoch": 1.029468536867842,
"grad_norm": 1.0579869747161865,
"learning_rate": 3.970398970398971e-05,
"loss": 0.6591,
"step": 4000
},
{
"epoch": 1.029468536867842,
"eval_loss": 0.8084650039672852,
"eval_runtime": 395.6791,
"eval_samples_per_second": 49.095,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00037788777625095846,
"step": 4000
},
{
"epoch": 1.0320422082100116,
"grad_norm": 1.5254931449890137,
"learning_rate": 3.967824967824968e-05,
"loss": 0.7753,
"step": 4010
},
{
"epoch": 1.0346158795521812,
"grad_norm": 0.9235827922821045,
"learning_rate": 3.9652509652509654e-05,
"loss": 0.6961,
"step": 4020
},
{
"epoch": 1.0371895508943507,
"grad_norm": 1.1861381530761719,
"learning_rate": 3.9626769626769626e-05,
"loss": 0.6979,
"step": 4030
},
{
"epoch": 1.0397632222365203,
"grad_norm": 1.0676695108413696,
"learning_rate": 3.9601029601029604e-05,
"loss": 0.7373,
"step": 4040
},
{
"epoch": 1.04233689357869,
"grad_norm": 0.9689359664916992,
"learning_rate": 3.9575289575289576e-05,
"loss": 0.7406,
"step": 4050
},
{
"epoch": 1.0449105649208597,
"grad_norm": 1.1008409261703491,
"learning_rate": 3.9549549549549554e-05,
"loss": 0.6522,
"step": 4060
},
{
"epoch": 1.0474842362630292,
"grad_norm": 1.293125033378601,
"learning_rate": 3.9523809523809526e-05,
"loss": 0.6762,
"step": 4070
},
{
"epoch": 1.0500579076051988,
"grad_norm": 0.9956673979759216,
"learning_rate": 3.9498069498069504e-05,
"loss": 0.6625,
"step": 4080
},
{
"epoch": 1.0526315789473684,
"grad_norm": 0.8510293960571289,
"learning_rate": 3.9472329472329476e-05,
"loss": 0.7396,
"step": 4090
},
{
"epoch": 1.055205250289538,
"grad_norm": 1.1811062097549438,
"learning_rate": 3.944658944658945e-05,
"loss": 0.6562,
"step": 4100
},
{
"epoch": 1.0577789216317077,
"grad_norm": 1.1089203357696533,
"learning_rate": 3.942084942084942e-05,
"loss": 0.6791,
"step": 4110
},
{
"epoch": 1.0603525929738773,
"grad_norm": 1.135820984840393,
"learning_rate": 3.93951093951094e-05,
"loss": 0.7083,
"step": 4120
},
{
"epoch": 1.0629262643160469,
"grad_norm": 0.8153538107872009,
"learning_rate": 3.936936936936937e-05,
"loss": 0.6713,
"step": 4130
},
{
"epoch": 1.0654999356582164,
"grad_norm": 1.1000064611434937,
"learning_rate": 3.934362934362935e-05,
"loss": 0.7594,
"step": 4140
},
{
"epoch": 1.068073607000386,
"grad_norm": 0.9205760955810547,
"learning_rate": 3.931788931788932e-05,
"loss": 0.7066,
"step": 4150
},
{
"epoch": 1.0706472783425556,
"grad_norm": 1.1012518405914307,
"learning_rate": 3.92921492921493e-05,
"loss": 0.691,
"step": 4160
},
{
"epoch": 1.0732209496847251,
"grad_norm": 0.9490509629249573,
"learning_rate": 3.926640926640927e-05,
"loss": 0.6361,
"step": 4170
},
{
"epoch": 1.075794621026895,
"grad_norm": 0.9934425950050354,
"learning_rate": 3.924066924066924e-05,
"loss": 0.6546,
"step": 4180
},
{
"epoch": 1.0783682923690645,
"grad_norm": 0.9373430609703064,
"learning_rate": 3.921492921492922e-05,
"loss": 0.7585,
"step": 4190
},
{
"epoch": 1.080941963711234,
"grad_norm": 0.7465323209762573,
"learning_rate": 3.918918918918919e-05,
"loss": 0.7026,
"step": 4200
},
{
"epoch": 1.0835156350534036,
"grad_norm": 0.8605757355690002,
"learning_rate": 3.916344916344917e-05,
"loss": 0.7215,
"step": 4210
},
{
"epoch": 1.0860893063955732,
"grad_norm": 1.27156400680542,
"learning_rate": 3.913770913770914e-05,
"loss": 0.7393,
"step": 4220
},
{
"epoch": 1.0886629777377428,
"grad_norm": 3.140281915664673,
"learning_rate": 3.911196911196912e-05,
"loss": 0.6781,
"step": 4230
},
{
"epoch": 1.0912366490799126,
"grad_norm": 1.438792109489441,
"learning_rate": 3.908622908622909e-05,
"loss": 0.7344,
"step": 4240
},
{
"epoch": 1.0938103204220821,
"grad_norm": 1.1474467515945435,
"learning_rate": 3.906048906048906e-05,
"loss": 0.6722,
"step": 4250
},
{
"epoch": 1.0963839917642517,
"grad_norm": 0.7898831963539124,
"learning_rate": 3.9034749034749034e-05,
"loss": 0.709,
"step": 4260
},
{
"epoch": 1.0989576631064213,
"grad_norm": 4.05173397064209,
"learning_rate": 3.900900900900901e-05,
"loss": 0.682,
"step": 4270
},
{
"epoch": 1.1015313344485909,
"grad_norm": 1.2369790077209473,
"learning_rate": 3.8983268983268984e-05,
"loss": 0.6313,
"step": 4280
},
{
"epoch": 1.1041050057907604,
"grad_norm": 1.0742212533950806,
"learning_rate": 3.895752895752896e-05,
"loss": 0.6878,
"step": 4290
},
{
"epoch": 1.1066786771329302,
"grad_norm": 4.394085884094238,
"learning_rate": 3.8931788931788934e-05,
"loss": 0.6974,
"step": 4300
},
{
"epoch": 1.1092523484750998,
"grad_norm": 1.285081148147583,
"learning_rate": 3.890604890604891e-05,
"loss": 0.6822,
"step": 4310
},
{
"epoch": 1.1118260198172694,
"grad_norm": 1.0450791120529175,
"learning_rate": 3.888030888030888e-05,
"loss": 0.6848,
"step": 4320
},
{
"epoch": 1.114399691159439,
"grad_norm": 0.808032214641571,
"learning_rate": 3.8854568854568855e-05,
"loss": 0.6127,
"step": 4330
},
{
"epoch": 1.1169733625016085,
"grad_norm": 0.6634959578514099,
"learning_rate": 3.882882882882883e-05,
"loss": 0.6332,
"step": 4340
},
{
"epoch": 1.119547033843778,
"grad_norm": 0.7773045301437378,
"learning_rate": 3.8803088803088805e-05,
"loss": 0.6558,
"step": 4350
},
{
"epoch": 1.1221207051859476,
"grad_norm": 0.9749086499214172,
"learning_rate": 3.877734877734878e-05,
"loss": 0.7365,
"step": 4360
},
{
"epoch": 1.1246943765281174,
"grad_norm": 1.0104619264602661,
"learning_rate": 3.8751608751608755e-05,
"loss": 0.731,
"step": 4370
},
{
"epoch": 1.127268047870287,
"grad_norm": 1.246193766593933,
"learning_rate": 3.8725868725868734e-05,
"loss": 0.6772,
"step": 4380
},
{
"epoch": 1.1298417192124566,
"grad_norm": 0.9654109477996826,
"learning_rate": 3.87001287001287e-05,
"loss": 0.7359,
"step": 4390
},
{
"epoch": 1.1324153905546261,
"grad_norm": 0.8749091029167175,
"learning_rate": 3.867438867438868e-05,
"loss": 0.6908,
"step": 4400
},
{
"epoch": 1.1349890618967957,
"grad_norm": 2.6671011447906494,
"learning_rate": 3.864864864864865e-05,
"loss": 0.6935,
"step": 4410
},
{
"epoch": 1.1375627332389655,
"grad_norm": 0.834434986114502,
"learning_rate": 3.862290862290863e-05,
"loss": 0.6978,
"step": 4420
},
{
"epoch": 1.140136404581135,
"grad_norm": 1.0454275608062744,
"learning_rate": 3.85971685971686e-05,
"loss": 0.7106,
"step": 4430
},
{
"epoch": 1.1427100759233046,
"grad_norm": 1.1424705982208252,
"learning_rate": 3.857142857142858e-05,
"loss": 0.6781,
"step": 4440
},
{
"epoch": 1.1452837472654742,
"grad_norm": 0.8291309475898743,
"learning_rate": 3.854568854568855e-05,
"loss": 0.6827,
"step": 4450
},
{
"epoch": 1.1478574186076438,
"grad_norm": 1.2139686346054077,
"learning_rate": 3.851994851994852e-05,
"loss": 0.6321,
"step": 4460
},
{
"epoch": 1.1504310899498134,
"grad_norm": 0.935051441192627,
"learning_rate": 3.849420849420849e-05,
"loss": 0.7246,
"step": 4470
},
{
"epoch": 1.153004761291983,
"grad_norm": 2.4891180992126465,
"learning_rate": 3.846846846846847e-05,
"loss": 0.6742,
"step": 4480
},
{
"epoch": 1.1555784326341527,
"grad_norm": 1.248016357421875,
"learning_rate": 3.844272844272844e-05,
"loss": 0.7204,
"step": 4490
},
{
"epoch": 1.1581521039763223,
"grad_norm": 3.4102649688720703,
"learning_rate": 3.841698841698842e-05,
"loss": 0.6985,
"step": 4500
},
{
"epoch": 1.1607257753184919,
"grad_norm": 1.1724501848220825,
"learning_rate": 3.839124839124839e-05,
"loss": 0.6828,
"step": 4510
},
{
"epoch": 1.1632994466606614,
"grad_norm": 0.8048924207687378,
"learning_rate": 3.836550836550837e-05,
"loss": 0.6608,
"step": 4520
},
{
"epoch": 1.165873118002831,
"grad_norm": 0.8188682794570923,
"learning_rate": 3.833976833976834e-05,
"loss": 0.671,
"step": 4530
},
{
"epoch": 1.1684467893450006,
"grad_norm": 0.879068911075592,
"learning_rate": 3.831402831402831e-05,
"loss": 0.7447,
"step": 4540
},
{
"epoch": 1.1710204606871701,
"grad_norm": 0.9891297817230225,
"learning_rate": 3.8288288288288285e-05,
"loss": 0.7087,
"step": 4550
},
{
"epoch": 1.17359413202934,
"grad_norm": 1.1570535898208618,
"learning_rate": 3.826254826254826e-05,
"loss": 0.6537,
"step": 4560
},
{
"epoch": 1.1761678033715095,
"grad_norm": 1.1008327007293701,
"learning_rate": 3.8236808236808235e-05,
"loss": 0.7138,
"step": 4570
},
{
"epoch": 1.178741474713679,
"grad_norm": 0.8416620492935181,
"learning_rate": 3.821106821106821e-05,
"loss": 0.6743,
"step": 4580
},
{
"epoch": 1.1813151460558486,
"grad_norm": 0.8778625726699829,
"learning_rate": 3.818532818532819e-05,
"loss": 0.6572,
"step": 4590
},
{
"epoch": 1.1838888173980182,
"grad_norm": 2.828808307647705,
"learning_rate": 3.815958815958816e-05,
"loss": 0.7036,
"step": 4600
},
{
"epoch": 1.1864624887401878,
"grad_norm": 1.3439350128173828,
"learning_rate": 3.8133848133848135e-05,
"loss": 0.759,
"step": 4610
},
{
"epoch": 1.1890361600823576,
"grad_norm": 0.7776147723197937,
"learning_rate": 3.8108108108108106e-05,
"loss": 0.7424,
"step": 4620
},
{
"epoch": 1.1916098314245271,
"grad_norm": 0.7925708293914795,
"learning_rate": 3.8082368082368085e-05,
"loss": 0.7157,
"step": 4630
},
{
"epoch": 1.1941835027666967,
"grad_norm": 0.9618962407112122,
"learning_rate": 3.8056628056628056e-05,
"loss": 0.6598,
"step": 4640
},
{
"epoch": 1.1967571741088663,
"grad_norm": 1.325016975402832,
"learning_rate": 3.8030888030888035e-05,
"loss": 0.7199,
"step": 4650
},
{
"epoch": 1.1993308454510359,
"grad_norm": 1.2265050411224365,
"learning_rate": 3.8005148005148006e-05,
"loss": 0.6643,
"step": 4660
},
{
"epoch": 1.2019045167932054,
"grad_norm": 3.7649102210998535,
"learning_rate": 3.7979407979407985e-05,
"loss": 0.6875,
"step": 4670
},
{
"epoch": 1.2044781881353752,
"grad_norm": 0.7999979257583618,
"learning_rate": 3.7953667953667956e-05,
"loss": 0.5883,
"step": 4680
},
{
"epoch": 1.2070518594775448,
"grad_norm": 1.012054681777954,
"learning_rate": 3.792792792792793e-05,
"loss": 0.707,
"step": 4690
},
{
"epoch": 1.2096255308197144,
"grad_norm": 3.4517581462860107,
"learning_rate": 3.79021879021879e-05,
"loss": 0.6276,
"step": 4700
},
{
"epoch": 1.212199202161884,
"grad_norm": 1.0213309526443481,
"learning_rate": 3.787644787644788e-05,
"loss": 0.6809,
"step": 4710
},
{
"epoch": 1.2147728735040535,
"grad_norm": 1.3979082107543945,
"learning_rate": 3.785070785070785e-05,
"loss": 0.7044,
"step": 4720
},
{
"epoch": 1.217346544846223,
"grad_norm": 0.9082636833190918,
"learning_rate": 3.782496782496783e-05,
"loss": 0.6827,
"step": 4730
},
{
"epoch": 1.2199202161883926,
"grad_norm": 0.9929207563400269,
"learning_rate": 3.77992277992278e-05,
"loss": 0.6552,
"step": 4740
},
{
"epoch": 1.2224938875305624,
"grad_norm": 0.873182475566864,
"learning_rate": 3.777348777348778e-05,
"loss": 0.5661,
"step": 4750
},
{
"epoch": 1.225067558872732,
"grad_norm": 1.0508198738098145,
"learning_rate": 3.774774774774775e-05,
"loss": 0.7024,
"step": 4760
},
{
"epoch": 1.2276412302149016,
"grad_norm": 1.1543952226638794,
"learning_rate": 3.772200772200772e-05,
"loss": 0.6994,
"step": 4770
},
{
"epoch": 1.2302149015570711,
"grad_norm": 3.8903653621673584,
"learning_rate": 3.76962676962677e-05,
"loss": 0.7234,
"step": 4780
},
{
"epoch": 1.2327885728992407,
"grad_norm": 1.0679028034210205,
"learning_rate": 3.767052767052767e-05,
"loss": 0.6768,
"step": 4790
},
{
"epoch": 1.2353622442414103,
"grad_norm": 2.860356330871582,
"learning_rate": 3.764478764478765e-05,
"loss": 0.7189,
"step": 4800
},
{
"epoch": 1.23793591558358,
"grad_norm": 1.192459225654602,
"learning_rate": 3.761904761904762e-05,
"loss": 0.7076,
"step": 4810
},
{
"epoch": 1.2405095869257496,
"grad_norm": 0.8065189719200134,
"learning_rate": 3.75933075933076e-05,
"loss": 0.6556,
"step": 4820
},
{
"epoch": 1.2430832582679192,
"grad_norm": 1.1386466026306152,
"learning_rate": 3.756756756756757e-05,
"loss": 0.7085,
"step": 4830
},
{
"epoch": 1.2456569296100888,
"grad_norm": 1.2543619871139526,
"learning_rate": 3.754182754182754e-05,
"loss": 0.7253,
"step": 4840
},
{
"epoch": 1.2482306009522584,
"grad_norm": 1.2314350605010986,
"learning_rate": 3.7516087516087514e-05,
"loss": 0.6282,
"step": 4850
},
{
"epoch": 1.250804272294428,
"grad_norm": 1.4268747568130493,
"learning_rate": 3.749034749034749e-05,
"loss": 0.705,
"step": 4860
},
{
"epoch": 1.2533779436365977,
"grad_norm": 0.9429617524147034,
"learning_rate": 3.7464607464607464e-05,
"loss": 0.702,
"step": 4870
},
{
"epoch": 1.2559516149787673,
"grad_norm": 1.0535614490509033,
"learning_rate": 3.743886743886744e-05,
"loss": 0.7376,
"step": 4880
},
{
"epoch": 1.2585252863209369,
"grad_norm": 4.010643005371094,
"learning_rate": 3.7413127413127414e-05,
"loss": 0.6941,
"step": 4890
},
{
"epoch": 1.2610989576631064,
"grad_norm": 0.8559678196907043,
"learning_rate": 3.738738738738739e-05,
"loss": 0.711,
"step": 4900
},
{
"epoch": 1.263672629005276,
"grad_norm": 0.8388733863830566,
"learning_rate": 3.7361647361647364e-05,
"loss": 0.7521,
"step": 4910
},
{
"epoch": 1.2662463003474456,
"grad_norm": 3.9231414794921875,
"learning_rate": 3.7335907335907336e-05,
"loss": 0.6587,
"step": 4920
},
{
"epoch": 1.2688199716896151,
"grad_norm": 1.1947028636932373,
"learning_rate": 3.731016731016731e-05,
"loss": 0.7029,
"step": 4930
},
{
"epoch": 1.271393643031785,
"grad_norm": 1.23485267162323,
"learning_rate": 3.7284427284427286e-05,
"loss": 0.7298,
"step": 4940
},
{
"epoch": 1.2739673143739545,
"grad_norm": 1.133660912513733,
"learning_rate": 3.725868725868726e-05,
"loss": 0.6935,
"step": 4950
},
{
"epoch": 1.276540985716124,
"grad_norm": 3.402822256088257,
"learning_rate": 3.7232947232947236e-05,
"loss": 0.649,
"step": 4960
},
{
"epoch": 1.2791146570582936,
"grad_norm": 1.2218265533447266,
"learning_rate": 3.720720720720721e-05,
"loss": 0.734,
"step": 4970
},
{
"epoch": 1.2816883284004632,
"grad_norm": 2.865407705307007,
"learning_rate": 3.7181467181467186e-05,
"loss": 0.6461,
"step": 4980
},
{
"epoch": 1.284261999742633,
"grad_norm": 1.4591176509857178,
"learning_rate": 3.715572715572716e-05,
"loss": 0.7582,
"step": 4990
},
{
"epoch": 1.2868356710848023,
"grad_norm": 3.178297519683838,
"learning_rate": 3.712998712998713e-05,
"loss": 0.7112,
"step": 5000
},
{
"epoch": 1.2868356710848023,
"eval_loss": 0.7959006428718567,
"eval_runtime": 395.735,
"eval_samples_per_second": 49.088,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.0003792163621041763,
"step": 5000
},
{
"epoch": 1.2894093424269721,
"grad_norm": 0.7849406003952026,
"learning_rate": 3.710424710424711e-05,
"loss": 0.6349,
"step": 5010
},
{
"epoch": 1.2919830137691417,
"grad_norm": 1.164996862411499,
"learning_rate": 3.707850707850708e-05,
"loss": 0.6836,
"step": 5020
},
{
"epoch": 1.2945566851113113,
"grad_norm": 4.410824298858643,
"learning_rate": 3.705276705276706e-05,
"loss": 0.7274,
"step": 5030
},
{
"epoch": 1.2971303564534808,
"grad_norm": 1.3747788667678833,
"learning_rate": 3.702702702702703e-05,
"loss": 0.713,
"step": 5040
},
{
"epoch": 1.2997040277956504,
"grad_norm": 3.5286660194396973,
"learning_rate": 3.700128700128701e-05,
"loss": 0.691,
"step": 5050
},
{
"epoch": 1.3022776991378202,
"grad_norm": 1.0159038305282593,
"learning_rate": 3.697554697554698e-05,
"loss": 0.7158,
"step": 5060
},
{
"epoch": 1.3048513704799898,
"grad_norm": 1.5588384866714478,
"learning_rate": 3.694980694980695e-05,
"loss": 0.7042,
"step": 5070
},
{
"epoch": 1.3074250418221594,
"grad_norm": 1.0354678630828857,
"learning_rate": 3.692406692406692e-05,
"loss": 0.6915,
"step": 5080
},
{
"epoch": 1.309998713164329,
"grad_norm": 3.5660855770111084,
"learning_rate": 3.68983268983269e-05,
"loss": 0.6111,
"step": 5090
},
{
"epoch": 1.3125723845064985,
"grad_norm": 1.010709285736084,
"learning_rate": 3.687258687258687e-05,
"loss": 0.6168,
"step": 5100
},
{
"epoch": 1.315146055848668,
"grad_norm": 2.2821505069732666,
"learning_rate": 3.684684684684685e-05,
"loss": 0.7227,
"step": 5110
},
{
"epoch": 1.3177197271908376,
"grad_norm": 0.8663123250007629,
"learning_rate": 3.682110682110682e-05,
"loss": 0.645,
"step": 5120
},
{
"epoch": 1.3202933985330074,
"grad_norm": 0.9988008141517639,
"learning_rate": 3.67953667953668e-05,
"loss": 0.7149,
"step": 5130
},
{
"epoch": 1.322867069875177,
"grad_norm": 1.1684341430664062,
"learning_rate": 3.676962676962677e-05,
"loss": 0.6919,
"step": 5140
},
{
"epoch": 1.3254407412173466,
"grad_norm": 0.9359066486358643,
"learning_rate": 3.6743886743886744e-05,
"loss": 0.6732,
"step": 5150
},
{
"epoch": 1.3280144125595161,
"grad_norm": 0.785476565361023,
"learning_rate": 3.6718146718146716e-05,
"loss": 0.7868,
"step": 5160
},
{
"epoch": 1.3305880839016857,
"grad_norm": 3.040825366973877,
"learning_rate": 3.6692406692406694e-05,
"loss": 0.704,
"step": 5170
},
{
"epoch": 1.3331617552438555,
"grad_norm": 1.1442739963531494,
"learning_rate": 3.6666666666666666e-05,
"loss": 0.7099,
"step": 5180
},
{
"epoch": 1.3357354265860248,
"grad_norm": 1.0111069679260254,
"learning_rate": 3.6640926640926644e-05,
"loss": 0.6733,
"step": 5190
},
{
"epoch": 1.3383090979281946,
"grad_norm": 0.922022819519043,
"learning_rate": 3.661518661518662e-05,
"loss": 0.7364,
"step": 5200
},
{
"epoch": 1.3408827692703642,
"grad_norm": 1.0119050741195679,
"learning_rate": 3.6589446589446594e-05,
"loss": 0.7414,
"step": 5210
},
{
"epoch": 1.3434564406125338,
"grad_norm": 1.0058611631393433,
"learning_rate": 3.6563706563706566e-05,
"loss": 0.7387,
"step": 5220
},
{
"epoch": 1.3460301119547033,
"grad_norm": 2.5699520111083984,
"learning_rate": 3.653796653796654e-05,
"loss": 0.6831,
"step": 5230
},
{
"epoch": 1.348603783296873,
"grad_norm": 0.723125159740448,
"learning_rate": 3.6512226512226516e-05,
"loss": 0.6642,
"step": 5240
},
{
"epoch": 1.3511774546390427,
"grad_norm": 0.9540688991546631,
"learning_rate": 3.648648648648649e-05,
"loss": 0.7312,
"step": 5250
},
{
"epoch": 1.3537511259812123,
"grad_norm": 1.160875916481018,
"learning_rate": 3.6460746460746466e-05,
"loss": 0.7023,
"step": 5260
},
{
"epoch": 1.3563247973233818,
"grad_norm": 1.0321364402770996,
"learning_rate": 3.643500643500644e-05,
"loss": 0.6955,
"step": 5270
},
{
"epoch": 1.3588984686655514,
"grad_norm": 0.8098282814025879,
"learning_rate": 3.6409266409266416e-05,
"loss": 0.6461,
"step": 5280
},
{
"epoch": 1.361472140007721,
"grad_norm": 1.080040454864502,
"learning_rate": 3.638352638352639e-05,
"loss": 0.7128,
"step": 5290
},
{
"epoch": 1.3640458113498906,
"grad_norm": 0.9302416443824768,
"learning_rate": 3.635778635778636e-05,
"loss": 0.7007,
"step": 5300
},
{
"epoch": 1.3666194826920601,
"grad_norm": 1.0521005392074585,
"learning_rate": 3.633204633204633e-05,
"loss": 0.6293,
"step": 5310
},
{
"epoch": 1.36919315403423,
"grad_norm": 1.0561240911483765,
"learning_rate": 3.630630630630631e-05,
"loss": 0.702,
"step": 5320
},
{
"epoch": 1.3717668253763995,
"grad_norm": 3.3522236347198486,
"learning_rate": 3.628056628056628e-05,
"loss": 0.6907,
"step": 5330
},
{
"epoch": 1.374340496718569,
"grad_norm": 1.2843527793884277,
"learning_rate": 3.625482625482626e-05,
"loss": 0.7478,
"step": 5340
},
{
"epoch": 1.3769141680607386,
"grad_norm": 0.7206987142562866,
"learning_rate": 3.622908622908623e-05,
"loss": 0.7065,
"step": 5350
},
{
"epoch": 1.3794878394029082,
"grad_norm": 2.9926466941833496,
"learning_rate": 3.620334620334621e-05,
"loss": 0.7363,
"step": 5360
},
{
"epoch": 1.3820615107450778,
"grad_norm": 3.599290609359741,
"learning_rate": 3.6177606177606174e-05,
"loss": 0.7904,
"step": 5370
},
{
"epoch": 1.3846351820872473,
"grad_norm": 1.1965876817703247,
"learning_rate": 3.615186615186615e-05,
"loss": 0.6879,
"step": 5380
},
{
"epoch": 1.3872088534294171,
"grad_norm": 0.8417690396308899,
"learning_rate": 3.6126126126126124e-05,
"loss": 0.7154,
"step": 5390
},
{
"epoch": 1.3897825247715867,
"grad_norm": 2.286024570465088,
"learning_rate": 3.61003861003861e-05,
"loss": 0.7073,
"step": 5400
},
{
"epoch": 1.3923561961137563,
"grad_norm": 0.7775086164474487,
"learning_rate": 3.607464607464608e-05,
"loss": 0.6656,
"step": 5410
},
{
"epoch": 1.3949298674559258,
"grad_norm": 0.9922925233840942,
"learning_rate": 3.604890604890605e-05,
"loss": 0.7027,
"step": 5420
},
{
"epoch": 1.3975035387980954,
"grad_norm": 3.689131021499634,
"learning_rate": 3.602316602316603e-05,
"loss": 0.6914,
"step": 5430
},
{
"epoch": 1.4000772101402652,
"grad_norm": 1.1228481531143188,
"learning_rate": 3.5997425997425995e-05,
"loss": 0.6495,
"step": 5440
},
{
"epoch": 1.4026508814824348,
"grad_norm": 1.0339312553405762,
"learning_rate": 3.5971685971685974e-05,
"loss": 0.6677,
"step": 5450
},
{
"epoch": 1.4052245528246043,
"grad_norm": 1.3286689519882202,
"learning_rate": 3.5945945945945945e-05,
"loss": 0.7425,
"step": 5460
},
{
"epoch": 1.407798224166774,
"grad_norm": 1.022578477859497,
"learning_rate": 3.5920205920205924e-05,
"loss": 0.6938,
"step": 5470
},
{
"epoch": 1.4103718955089435,
"grad_norm": 0.8292086124420166,
"learning_rate": 3.5894465894465895e-05,
"loss": 0.6519,
"step": 5480
},
{
"epoch": 1.412945566851113,
"grad_norm": 0.8953461647033691,
"learning_rate": 3.5868725868725874e-05,
"loss": 0.6347,
"step": 5490
},
{
"epoch": 1.4155192381932826,
"grad_norm": 1.0440467596054077,
"learning_rate": 3.5842985842985845e-05,
"loss": 0.6647,
"step": 5500
},
{
"epoch": 1.4180929095354524,
"grad_norm": 1.0310571193695068,
"learning_rate": 3.581724581724582e-05,
"loss": 0.7084,
"step": 5510
},
{
"epoch": 1.420666580877622,
"grad_norm": 1.1693068742752075,
"learning_rate": 3.579150579150579e-05,
"loss": 0.6517,
"step": 5520
},
{
"epoch": 1.4232402522197916,
"grad_norm": 0.7255993485450745,
"learning_rate": 3.576576576576577e-05,
"loss": 0.7003,
"step": 5530
},
{
"epoch": 1.4258139235619611,
"grad_norm": 0.8931599259376526,
"learning_rate": 3.574002574002574e-05,
"loss": 0.6894,
"step": 5540
},
{
"epoch": 1.4283875949041307,
"grad_norm": 1.551103115081787,
"learning_rate": 3.571428571428572e-05,
"loss": 0.7459,
"step": 5550
},
{
"epoch": 1.4309612662463003,
"grad_norm": 0.9363420605659485,
"learning_rate": 3.568854568854569e-05,
"loss": 0.7155,
"step": 5560
},
{
"epoch": 1.4335349375884698,
"grad_norm": 0.7762523889541626,
"learning_rate": 3.566280566280567e-05,
"loss": 0.6855,
"step": 5570
},
{
"epoch": 1.4361086089306396,
"grad_norm": 3.491201400756836,
"learning_rate": 3.563706563706564e-05,
"loss": 0.7322,
"step": 5580
},
{
"epoch": 1.4386822802728092,
"grad_norm": 1.4872835874557495,
"learning_rate": 3.561132561132561e-05,
"loss": 0.7379,
"step": 5590
},
{
"epoch": 1.4412559516149788,
"grad_norm": 3.8220291137695312,
"learning_rate": 3.558558558558558e-05,
"loss": 0.7969,
"step": 5600
},
{
"epoch": 1.4438296229571483,
"grad_norm": 1.0501987934112549,
"learning_rate": 3.555984555984556e-05,
"loss": 0.7007,
"step": 5610
},
{
"epoch": 1.446403294299318,
"grad_norm": 0.7753943204879761,
"learning_rate": 3.553410553410554e-05,
"loss": 0.6857,
"step": 5620
},
{
"epoch": 1.4489769656414877,
"grad_norm": 0.8087158799171448,
"learning_rate": 3.550836550836551e-05,
"loss": 0.6824,
"step": 5630
},
{
"epoch": 1.451550636983657,
"grad_norm": 1.105362892150879,
"learning_rate": 3.548262548262549e-05,
"loss": 0.6828,
"step": 5640
},
{
"epoch": 1.4541243083258268,
"grad_norm": 4.095690727233887,
"learning_rate": 3.545688545688546e-05,
"loss": 0.6885,
"step": 5650
},
{
"epoch": 1.4566979796679964,
"grad_norm": 1.0908024311065674,
"learning_rate": 3.543114543114543e-05,
"loss": 0.7319,
"step": 5660
},
{
"epoch": 1.459271651010166,
"grad_norm": 1.1036632061004639,
"learning_rate": 3.5405405405405403e-05,
"loss": 0.713,
"step": 5670
},
{
"epoch": 1.4618453223523356,
"grad_norm": 0.9031351208686829,
"learning_rate": 3.537966537966538e-05,
"loss": 0.6322,
"step": 5680
},
{
"epoch": 1.4644189936945051,
"grad_norm": 1.1566755771636963,
"learning_rate": 3.5353925353925353e-05,
"loss": 0.6899,
"step": 5690
},
{
"epoch": 1.466992665036675,
"grad_norm": 0.8092819452285767,
"learning_rate": 3.532818532818533e-05,
"loss": 0.6701,
"step": 5700
},
{
"epoch": 1.4695663363788445,
"grad_norm": 0.9954015612602234,
"learning_rate": 3.5302445302445303e-05,
"loss": 0.7205,
"step": 5710
},
{
"epoch": 1.472140007721014,
"grad_norm": 0.8541660308837891,
"learning_rate": 3.527670527670528e-05,
"loss": 0.6193,
"step": 5720
},
{
"epoch": 1.4747136790631836,
"grad_norm": 3.6179423332214355,
"learning_rate": 3.5250965250965253e-05,
"loss": 0.7084,
"step": 5730
},
{
"epoch": 1.4772873504053532,
"grad_norm": 1.010477900505066,
"learning_rate": 3.5225225225225225e-05,
"loss": 0.7267,
"step": 5740
},
{
"epoch": 1.4798610217475228,
"grad_norm": 0.8246733546257019,
"learning_rate": 3.51994851994852e-05,
"loss": 0.6874,
"step": 5750
},
{
"epoch": 1.4824346930896923,
"grad_norm": 0.7221471071243286,
"learning_rate": 3.5173745173745175e-05,
"loss": 0.7386,
"step": 5760
},
{
"epoch": 1.4850083644318621,
"grad_norm": 1.1807109117507935,
"learning_rate": 3.514800514800515e-05,
"loss": 0.7059,
"step": 5770
},
{
"epoch": 1.4875820357740317,
"grad_norm": 1.179556131362915,
"learning_rate": 3.5122265122265125e-05,
"loss": 0.675,
"step": 5780
},
{
"epoch": 1.4901557071162013,
"grad_norm": 3.564592123031616,
"learning_rate": 3.50965250965251e-05,
"loss": 0.628,
"step": 5790
},
{
"epoch": 1.4927293784583708,
"grad_norm": 0.8945732712745667,
"learning_rate": 3.5070785070785075e-05,
"loss": 0.7223,
"step": 5800
},
{
"epoch": 1.4953030498005404,
"grad_norm": 1.0145196914672852,
"learning_rate": 3.504504504504505e-05,
"loss": 0.6822,
"step": 5810
},
{
"epoch": 1.4978767211427102,
"grad_norm": 1.1235549449920654,
"learning_rate": 3.501930501930502e-05,
"loss": 0.6738,
"step": 5820
},
{
"epoch": 1.5004503924848795,
"grad_norm": 1.2400606870651245,
"learning_rate": 3.4993564993565e-05,
"loss": 0.6637,
"step": 5830
},
{
"epoch": 1.5030240638270493,
"grad_norm": 4.3333821296691895,
"learning_rate": 3.496782496782497e-05,
"loss": 0.6351,
"step": 5840
},
{
"epoch": 1.505597735169219,
"grad_norm": 1.126704216003418,
"learning_rate": 3.4942084942084947e-05,
"loss": 0.654,
"step": 5850
},
{
"epoch": 1.5081714065113885,
"grad_norm": 1.162214994430542,
"learning_rate": 3.491634491634492e-05,
"loss": 0.6906,
"step": 5860
},
{
"epoch": 1.510745077853558,
"grad_norm": 1.1057708263397217,
"learning_rate": 3.4890604890604897e-05,
"loss": 0.6567,
"step": 5870
},
{
"epoch": 1.5133187491957276,
"grad_norm": 2.668668270111084,
"learning_rate": 3.486486486486487e-05,
"loss": 0.7647,
"step": 5880
},
{
"epoch": 1.5158924205378974,
"grad_norm": 4.099859714508057,
"learning_rate": 3.483912483912484e-05,
"loss": 0.75,
"step": 5890
},
{
"epoch": 1.5184660918800668,
"grad_norm": 0.9541329145431519,
"learning_rate": 3.481338481338481e-05,
"loss": 0.7209,
"step": 5900
},
{
"epoch": 1.5210397632222366,
"grad_norm": 0.9891238212585449,
"learning_rate": 3.478764478764479e-05,
"loss": 0.6793,
"step": 5910
},
{
"epoch": 1.5236134345644061,
"grad_norm": 1.2427529096603394,
"learning_rate": 3.476190476190476e-05,
"loss": 0.7109,
"step": 5920
},
{
"epoch": 1.5261871059065757,
"grad_norm": 0.8798409104347229,
"learning_rate": 3.473616473616474e-05,
"loss": 0.7161,
"step": 5930
},
{
"epoch": 1.5287607772487455,
"grad_norm": 0.9166936278343201,
"learning_rate": 3.471042471042471e-05,
"loss": 0.6958,
"step": 5940
},
{
"epoch": 1.5313344485909148,
"grad_norm": 1.1786912679672241,
"learning_rate": 3.468468468468469e-05,
"loss": 0.6598,
"step": 5950
},
{
"epoch": 1.5339081199330846,
"grad_norm": 0.8544349670410156,
"learning_rate": 3.465894465894466e-05,
"loss": 0.6384,
"step": 5960
},
{
"epoch": 1.5364817912752542,
"grad_norm": 1.3337987661361694,
"learning_rate": 3.463320463320463e-05,
"loss": 0.72,
"step": 5970
},
{
"epoch": 1.5390554626174238,
"grad_norm": 1.6757917404174805,
"learning_rate": 3.4607464607464605e-05,
"loss": 0.6767,
"step": 5980
},
{
"epoch": 1.5416291339595933,
"grad_norm": 1.042203426361084,
"learning_rate": 3.458172458172458e-05,
"loss": 0.6199,
"step": 5990
},
{
"epoch": 1.544202805301763,
"grad_norm": 0.9380660057067871,
"learning_rate": 3.4555984555984555e-05,
"loss": 0.635,
"step": 6000
},
{
"epoch": 1.544202805301763,
"eval_loss": 0.7921908497810364,
"eval_runtime": 395.6711,
"eval_samples_per_second": 49.096,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.0003712448469848693,
"step": 6000
},
{
"epoch": 1.5467764766439327,
"grad_norm": 1.0668463706970215,
"learning_rate": 3.453024453024453e-05,
"loss": 0.7029,
"step": 6010
},
{
"epoch": 1.549350147986102,
"grad_norm": 0.9166046380996704,
"learning_rate": 3.4504504504504505e-05,
"loss": 0.7513,
"step": 6020
},
{
"epoch": 1.5519238193282718,
"grad_norm": 1.067365050315857,
"learning_rate": 3.447876447876448e-05,
"loss": 0.6937,
"step": 6030
},
{
"epoch": 1.5544974906704414,
"grad_norm": 1.0622658729553223,
"learning_rate": 3.4453024453024455e-05,
"loss": 0.7011,
"step": 6040
},
{
"epoch": 1.557071162012611,
"grad_norm": 0.8257264494895935,
"learning_rate": 3.4427284427284426e-05,
"loss": 0.6929,
"step": 6050
},
{
"epoch": 1.5596448333547805,
"grad_norm": 1.0909701585769653,
"learning_rate": 3.440411840411841e-05,
"loss": 0.6587,
"step": 6060
},
{
"epoch": 1.5622185046969501,
"grad_norm": 1.0562666654586792,
"learning_rate": 3.437837837837838e-05,
"loss": 0.7101,
"step": 6070
},
{
"epoch": 1.56479217603912,
"grad_norm": 0.9798877239227295,
"learning_rate": 3.4352638352638353e-05,
"loss": 0.7049,
"step": 6080
},
{
"epoch": 1.5673658473812893,
"grad_norm": 1.297472357749939,
"learning_rate": 3.4326898326898325e-05,
"loss": 0.7213,
"step": 6090
},
{
"epoch": 1.569939518723459,
"grad_norm": 0.9715744256973267,
"learning_rate": 3.4301158301158303e-05,
"loss": 0.667,
"step": 6100
},
{
"epoch": 1.5725131900656286,
"grad_norm": 4.2070817947387695,
"learning_rate": 3.4275418275418275e-05,
"loss": 0.6737,
"step": 6110
},
{
"epoch": 1.5750868614077982,
"grad_norm": 0.8832525014877319,
"learning_rate": 3.4249678249678253e-05,
"loss": 0.8282,
"step": 6120
},
{
"epoch": 1.577660532749968,
"grad_norm": 1.0799152851104736,
"learning_rate": 3.4223938223938225e-05,
"loss": 0.6991,
"step": 6130
},
{
"epoch": 1.5802342040921373,
"grad_norm": 0.986268162727356,
"learning_rate": 3.4198198198198203e-05,
"loss": 0.6998,
"step": 6140
},
{
"epoch": 1.5828078754343071,
"grad_norm": 1.0623130798339844,
"learning_rate": 3.417245817245817e-05,
"loss": 0.7329,
"step": 6150
},
{
"epoch": 1.5853815467764767,
"grad_norm": 0.9408344626426697,
"learning_rate": 3.414671814671815e-05,
"loss": 0.6361,
"step": 6160
},
{
"epoch": 1.5879552181186463,
"grad_norm": 1.0099743604660034,
"learning_rate": 3.412097812097812e-05,
"loss": 0.6459,
"step": 6170
},
{
"epoch": 1.5905288894608158,
"grad_norm": 1.1268500089645386,
"learning_rate": 3.40952380952381e-05,
"loss": 0.663,
"step": 6180
},
{
"epoch": 1.5931025608029854,
"grad_norm": 1.2413302659988403,
"learning_rate": 3.4069498069498075e-05,
"loss": 0.6333,
"step": 6190
},
{
"epoch": 1.5956762321451552,
"grad_norm": 1.3237611055374146,
"learning_rate": 3.404375804375805e-05,
"loss": 0.6517,
"step": 6200
},
{
"epoch": 1.5982499034873245,
"grad_norm": 0.8348039388656616,
"learning_rate": 3.4018018018018025e-05,
"loss": 0.6988,
"step": 6210
},
{
"epoch": 1.6008235748294943,
"grad_norm": 1.1034482717514038,
"learning_rate": 3.3992277992278e-05,
"loss": 0.6542,
"step": 6220
},
{
"epoch": 1.603397246171664,
"grad_norm": 0.817833662033081,
"learning_rate": 3.396653796653797e-05,
"loss": 0.6705,
"step": 6230
},
{
"epoch": 1.6059709175138335,
"grad_norm": 0.8758569955825806,
"learning_rate": 3.394079794079794e-05,
"loss": 0.6661,
"step": 6240
},
{
"epoch": 1.608544588856003,
"grad_norm": 1.172839641571045,
"learning_rate": 3.391505791505792e-05,
"loss": 0.6228,
"step": 6250
},
{
"epoch": 1.6111182601981726,
"grad_norm": 0.9346746802330017,
"learning_rate": 3.388931788931789e-05,
"loss": 0.6691,
"step": 6260
},
{
"epoch": 1.6136919315403424,
"grad_norm": 0.942145586013794,
"learning_rate": 3.386357786357787e-05,
"loss": 0.6838,
"step": 6270
},
{
"epoch": 1.6162656028825118,
"grad_norm": 4.026924133300781,
"learning_rate": 3.383783783783784e-05,
"loss": 0.6771,
"step": 6280
},
{
"epoch": 1.6188392742246815,
"grad_norm": 0.9714291095733643,
"learning_rate": 3.381209781209782e-05,
"loss": 0.6274,
"step": 6290
},
{
"epoch": 1.6214129455668511,
"grad_norm": 3.0900719165802,
"learning_rate": 3.378635778635778e-05,
"loss": 0.6962,
"step": 6300
},
{
"epoch": 1.6239866169090207,
"grad_norm": 1.203955888748169,
"learning_rate": 3.376061776061776e-05,
"loss": 0.7344,
"step": 6310
},
{
"epoch": 1.6265602882511905,
"grad_norm": 1.0462515354156494,
"learning_rate": 3.373487773487773e-05,
"loss": 0.698,
"step": 6320
},
{
"epoch": 1.6291339595933598,
"grad_norm": 0.8859325647354126,
"learning_rate": 3.370913770913771e-05,
"loss": 0.6949,
"step": 6330
},
{
"epoch": 1.6317076309355296,
"grad_norm": 1.0180076360702515,
"learning_rate": 3.368339768339768e-05,
"loss": 0.6681,
"step": 6340
},
{
"epoch": 1.6342813022776992,
"grad_norm": 3.5328500270843506,
"learning_rate": 3.365765765765766e-05,
"loss": 0.6147,
"step": 6350
},
{
"epoch": 1.6368549736198688,
"grad_norm": 0.9282681345939636,
"learning_rate": 3.363191763191763e-05,
"loss": 0.7505,
"step": 6360
},
{
"epoch": 1.6394286449620383,
"grad_norm": 0.9516205191612244,
"learning_rate": 3.3606177606177605e-05,
"loss": 0.7205,
"step": 6370
},
{
"epoch": 1.642002316304208,
"grad_norm": 1.07736337184906,
"learning_rate": 3.358043758043758e-05,
"loss": 0.6584,
"step": 6380
},
{
"epoch": 1.6445759876463777,
"grad_norm": 0.8120790123939514,
"learning_rate": 3.3554697554697555e-05,
"loss": 0.6473,
"step": 6390
},
{
"epoch": 1.647149658988547,
"grad_norm": 0.781129777431488,
"learning_rate": 3.352895752895753e-05,
"loss": 0.673,
"step": 6400
},
{
"epoch": 1.6497233303307168,
"grad_norm": 1.0196880102157593,
"learning_rate": 3.3503217503217505e-05,
"loss": 0.6674,
"step": 6410
},
{
"epoch": 1.6522970016728864,
"grad_norm": 1.0872300863265991,
"learning_rate": 3.347747747747748e-05,
"loss": 0.674,
"step": 6420
},
{
"epoch": 1.654870673015056,
"grad_norm": 0.8442071676254272,
"learning_rate": 3.3451737451737455e-05,
"loss": 0.6944,
"step": 6430
},
{
"epoch": 1.6574443443572255,
"grad_norm": 0.9583492279052734,
"learning_rate": 3.3425997425997426e-05,
"loss": 0.7546,
"step": 6440
},
{
"epoch": 1.660018015699395,
"grad_norm": 1.018364667892456,
"learning_rate": 3.34002574002574e-05,
"loss": 0.6987,
"step": 6450
},
{
"epoch": 1.662591687041565,
"grad_norm": 0.833738386631012,
"learning_rate": 3.3374517374517376e-05,
"loss": 0.7055,
"step": 6460
},
{
"epoch": 1.6651653583837343,
"grad_norm": 1.014647126197815,
"learning_rate": 3.334877734877735e-05,
"loss": 0.7545,
"step": 6470
},
{
"epoch": 1.667739029725904,
"grad_norm": 1.2637064456939697,
"learning_rate": 3.3323037323037326e-05,
"loss": 0.728,
"step": 6480
},
{
"epoch": 1.6703127010680736,
"grad_norm": 0.9318623542785645,
"learning_rate": 3.32972972972973e-05,
"loss": 0.663,
"step": 6490
},
{
"epoch": 1.6728863724102432,
"grad_norm": 1.4659100770950317,
"learning_rate": 3.3271557271557276e-05,
"loss": 0.7003,
"step": 6500
},
{
"epoch": 1.675460043752413,
"grad_norm": 0.7381783127784729,
"learning_rate": 3.324581724581725e-05,
"loss": 0.7492,
"step": 6510
},
{
"epoch": 1.6780337150945823,
"grad_norm": 0.9546041488647461,
"learning_rate": 3.322007722007722e-05,
"loss": 0.6817,
"step": 6520
},
{
"epoch": 1.6806073864367521,
"grad_norm": 0.8316722512245178,
"learning_rate": 3.319433719433719e-05,
"loss": 0.7269,
"step": 6530
},
{
"epoch": 1.6831810577789217,
"grad_norm": 0.8137519955635071,
"learning_rate": 3.316859716859717e-05,
"loss": 0.7524,
"step": 6540
},
{
"epoch": 1.6857547291210913,
"grad_norm": 0.772885262966156,
"learning_rate": 3.314285714285714e-05,
"loss": 0.661,
"step": 6550
},
{
"epoch": 1.6883284004632608,
"grad_norm": 0.9286116361618042,
"learning_rate": 3.311711711711712e-05,
"loss": 0.7038,
"step": 6560
},
{
"epoch": 1.6909020718054304,
"grad_norm": 1.0171113014221191,
"learning_rate": 3.309137709137709e-05,
"loss": 0.715,
"step": 6570
},
{
"epoch": 1.6934757431476002,
"grad_norm": 1.098702311515808,
"learning_rate": 3.306563706563707e-05,
"loss": 0.673,
"step": 6580
},
{
"epoch": 1.6960494144897695,
"grad_norm": 0.9981555342674255,
"learning_rate": 3.303989703989704e-05,
"loss": 0.6945,
"step": 6590
},
{
"epoch": 1.6986230858319393,
"grad_norm": 1.3243064880371094,
"learning_rate": 3.301415701415701e-05,
"loss": 0.7054,
"step": 6600
},
{
"epoch": 1.701196757174109,
"grad_norm": 2.013225555419922,
"learning_rate": 3.298841698841699e-05,
"loss": 0.6962,
"step": 6610
},
{
"epoch": 1.7037704285162785,
"grad_norm": 0.7733390927314758,
"learning_rate": 3.296267696267696e-05,
"loss": 0.6986,
"step": 6620
},
{
"epoch": 1.706344099858448,
"grad_norm": 0.9102844595909119,
"learning_rate": 3.293693693693694e-05,
"loss": 0.7269,
"step": 6630
},
{
"epoch": 1.7089177712006176,
"grad_norm": 0.9313980340957642,
"learning_rate": 3.291119691119691e-05,
"loss": 0.6498,
"step": 6640
},
{
"epoch": 1.7114914425427874,
"grad_norm": 0.9392831325531006,
"learning_rate": 3.288545688545689e-05,
"loss": 0.7894,
"step": 6650
},
{
"epoch": 1.7140651138849567,
"grad_norm": 1.059606909751892,
"learning_rate": 3.285971685971686e-05,
"loss": 0.6854,
"step": 6660
},
{
"epoch": 1.7166387852271265,
"grad_norm": 0.7358580827713013,
"learning_rate": 3.2833976833976834e-05,
"loss": 0.6555,
"step": 6670
},
{
"epoch": 1.719212456569296,
"grad_norm": 0.9238846302032471,
"learning_rate": 3.2808236808236806e-05,
"loss": 0.7385,
"step": 6680
},
{
"epoch": 1.7217861279114657,
"grad_norm": 2.5383057594299316,
"learning_rate": 3.2782496782496784e-05,
"loss": 0.6411,
"step": 6690
},
{
"epoch": 1.7243597992536355,
"grad_norm": 1.0750601291656494,
"learning_rate": 3.2756756756756756e-05,
"loss": 0.6677,
"step": 6700
},
{
"epoch": 1.7269334705958048,
"grad_norm": 3.5669894218444824,
"learning_rate": 3.2731016731016734e-05,
"loss": 0.7254,
"step": 6710
},
{
"epoch": 1.7295071419379746,
"grad_norm": 1.3590166568756104,
"learning_rate": 3.2705276705276706e-05,
"loss": 0.5707,
"step": 6720
},
{
"epoch": 1.732080813280144,
"grad_norm": 3.761894702911377,
"learning_rate": 3.2679536679536684e-05,
"loss": 0.654,
"step": 6730
},
{
"epoch": 1.7346544846223138,
"grad_norm": 1.5816675424575806,
"learning_rate": 3.2653796653796656e-05,
"loss": 0.7373,
"step": 6740
},
{
"epoch": 1.7372281559644833,
"grad_norm": 1.1728434562683105,
"learning_rate": 3.262805662805663e-05,
"loss": 0.6679,
"step": 6750
},
{
"epoch": 1.739801827306653,
"grad_norm": 1.1865681409835815,
"learning_rate": 3.26023166023166e-05,
"loss": 0.6444,
"step": 6760
},
{
"epoch": 1.7423754986488227,
"grad_norm": 3.5320911407470703,
"learning_rate": 3.257657657657658e-05,
"loss": 0.7412,
"step": 6770
},
{
"epoch": 1.744949169990992,
"grad_norm": 0.9932678937911987,
"learning_rate": 3.255083655083655e-05,
"loss": 0.6619,
"step": 6780
},
{
"epoch": 1.7475228413331618,
"grad_norm": 0.9329503774642944,
"learning_rate": 3.252509652509653e-05,
"loss": 0.668,
"step": 6790
},
{
"epoch": 1.7500965126753314,
"grad_norm": 0.91359943151474,
"learning_rate": 3.2499356499356506e-05,
"loss": 0.7856,
"step": 6800
},
{
"epoch": 1.752670184017501,
"grad_norm": 2.197328567504883,
"learning_rate": 3.247361647361648e-05,
"loss": 0.7182,
"step": 6810
},
{
"epoch": 1.7552438553596705,
"grad_norm": 1.2823790311813354,
"learning_rate": 3.244787644787645e-05,
"loss": 0.7202,
"step": 6820
},
{
"epoch": 1.75781752670184,
"grad_norm": 1.1787232160568237,
"learning_rate": 3.242213642213642e-05,
"loss": 0.6817,
"step": 6830
},
{
"epoch": 1.76039119804401,
"grad_norm": 0.9441166520118713,
"learning_rate": 3.23963963963964e-05,
"loss": 0.629,
"step": 6840
},
{
"epoch": 1.7629648693861792,
"grad_norm": 2.959947109222412,
"learning_rate": 3.237065637065637e-05,
"loss": 0.6758,
"step": 6850
},
{
"epoch": 1.765538540728349,
"grad_norm": 0.9195284247398376,
"learning_rate": 3.234491634491635e-05,
"loss": 0.6811,
"step": 6860
},
{
"epoch": 1.7681122120705186,
"grad_norm": 1.753348708152771,
"learning_rate": 3.231917631917632e-05,
"loss": 0.6048,
"step": 6870
},
{
"epoch": 1.7706858834126882,
"grad_norm": 0.8950250744819641,
"learning_rate": 3.22934362934363e-05,
"loss": 0.7056,
"step": 6880
},
{
"epoch": 1.773259554754858,
"grad_norm": 0.9784029722213745,
"learning_rate": 3.226769626769627e-05,
"loss": 0.664,
"step": 6890
},
{
"epoch": 1.7758332260970273,
"grad_norm": 3.3027687072753906,
"learning_rate": 3.224195624195624e-05,
"loss": 0.6592,
"step": 6900
},
{
"epoch": 1.778406897439197,
"grad_norm": 2.5597102642059326,
"learning_rate": 3.2216216216216214e-05,
"loss": 0.6791,
"step": 6910
},
{
"epoch": 1.7809805687813665,
"grad_norm": 1.0470184087753296,
"learning_rate": 3.219047619047619e-05,
"loss": 0.6964,
"step": 6920
},
{
"epoch": 1.7835542401235362,
"grad_norm": 2.2413344383239746,
"learning_rate": 3.2164736164736164e-05,
"loss": 0.6795,
"step": 6930
},
{
"epoch": 1.7861279114657058,
"grad_norm": 1.297616958618164,
"learning_rate": 3.213899613899614e-05,
"loss": 0.6991,
"step": 6940
},
{
"epoch": 1.7887015828078754,
"grad_norm": 0.9208124279975891,
"learning_rate": 3.2113256113256114e-05,
"loss": 0.706,
"step": 6950
},
{
"epoch": 1.7912752541500452,
"grad_norm": 1.298047423362732,
"learning_rate": 3.208751608751609e-05,
"loss": 0.6466,
"step": 6960
},
{
"epoch": 1.7938489254922145,
"grad_norm": 0.9276260733604431,
"learning_rate": 3.2061776061776064e-05,
"loss": 0.6572,
"step": 6970
},
{
"epoch": 1.7964225968343843,
"grad_norm": 0.9074581861495972,
"learning_rate": 3.2036036036036036e-05,
"loss": 0.743,
"step": 6980
},
{
"epoch": 1.798996268176554,
"grad_norm": 0.9442815184593201,
"learning_rate": 3.201029601029601e-05,
"loss": 0.7033,
"step": 6990
},
{
"epoch": 1.8015699395187235,
"grad_norm": 1.1410044431686401,
"learning_rate": 3.1984555984555986e-05,
"loss": 0.7442,
"step": 7000
},
{
"epoch": 1.8015699395187235,
"eval_loss": 0.7747899889945984,
"eval_runtime": 395.6047,
"eval_samples_per_second": 49.105,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00037655919039774065,
"step": 7000
},
{
"epoch": 1.804143610860893,
"grad_norm": 0.8645033836364746,
"learning_rate": 3.1958815958815964e-05,
"loss": 0.6423,
"step": 7010
},
{
"epoch": 1.8067172822030626,
"grad_norm": 1.2555421590805054,
"learning_rate": 3.1933075933075936e-05,
"loss": 0.5997,
"step": 7020
},
{
"epoch": 1.8092909535452324,
"grad_norm": 2.7066619396209717,
"learning_rate": 3.1907335907335914e-05,
"loss": 0.659,
"step": 7030
},
{
"epoch": 1.8118646248874017,
"grad_norm": 0.806398868560791,
"learning_rate": 3.1881595881595886e-05,
"loss": 0.6708,
"step": 7040
},
{
"epoch": 1.8144382962295715,
"grad_norm": 3.4134740829467773,
"learning_rate": 3.185585585585586e-05,
"loss": 0.6872,
"step": 7050
},
{
"epoch": 1.817011967571741,
"grad_norm": 1.0480446815490723,
"learning_rate": 3.183011583011583e-05,
"loss": 0.6778,
"step": 7060
},
{
"epoch": 1.8195856389139107,
"grad_norm": 0.9471696019172668,
"learning_rate": 3.180437580437581e-05,
"loss": 0.6911,
"step": 7070
},
{
"epoch": 1.8221593102560802,
"grad_norm": 4.224599361419678,
"learning_rate": 3.177863577863578e-05,
"loss": 0.7447,
"step": 7080
},
{
"epoch": 1.8247329815982498,
"grad_norm": 1.0805798768997192,
"learning_rate": 3.175289575289576e-05,
"loss": 0.704,
"step": 7090
},
{
"epoch": 1.8273066529404196,
"grad_norm": 1.3605204820632935,
"learning_rate": 3.172715572715573e-05,
"loss": 0.7009,
"step": 7100
},
{
"epoch": 1.829880324282589,
"grad_norm": 1.358485460281372,
"learning_rate": 3.170141570141571e-05,
"loss": 0.6511,
"step": 7110
},
{
"epoch": 1.8324539956247587,
"grad_norm": 1.068803071975708,
"learning_rate": 3.167567567567568e-05,
"loss": 0.662,
"step": 7120
},
{
"epoch": 1.8350276669669283,
"grad_norm": 1.6438531875610352,
"learning_rate": 3.164993564993565e-05,
"loss": 0.7509,
"step": 7130
},
{
"epoch": 1.8376013383090979,
"grad_norm": 0.9765356183052063,
"learning_rate": 3.162419562419562e-05,
"loss": 0.6796,
"step": 7140
},
{
"epoch": 1.8401750096512677,
"grad_norm": 0.8123814463615417,
"learning_rate": 3.15984555984556e-05,
"loss": 0.6197,
"step": 7150
},
{
"epoch": 1.842748680993437,
"grad_norm": 0.9948647618293762,
"learning_rate": 3.157271557271557e-05,
"loss": 0.7236,
"step": 7160
},
{
"epoch": 1.8453223523356068,
"grad_norm": 0.8912683725357056,
"learning_rate": 3.154697554697555e-05,
"loss": 0.666,
"step": 7170
},
{
"epoch": 1.8478960236777764,
"grad_norm": 1.1189854145050049,
"learning_rate": 3.152123552123552e-05,
"loss": 0.7255,
"step": 7180
},
{
"epoch": 1.850469695019946,
"grad_norm": 1.395694375038147,
"learning_rate": 3.14954954954955e-05,
"loss": 0.6849,
"step": 7190
},
{
"epoch": 1.8530433663621155,
"grad_norm": 0.9775906205177307,
"learning_rate": 3.1469755469755465e-05,
"loss": 0.7228,
"step": 7200
},
{
"epoch": 1.855617037704285,
"grad_norm": 1.1532434225082397,
"learning_rate": 3.1444015444015444e-05,
"loss": 0.7402,
"step": 7210
},
{
"epoch": 1.858190709046455,
"grad_norm": 0.9169228076934814,
"learning_rate": 3.141827541827542e-05,
"loss": 0.6879,
"step": 7220
},
{
"epoch": 1.8607643803886242,
"grad_norm": 1.0046635866165161,
"learning_rate": 3.1392535392535394e-05,
"loss": 0.6767,
"step": 7230
},
{
"epoch": 1.863338051730794,
"grad_norm": 0.9030658602714539,
"learning_rate": 3.136679536679537e-05,
"loss": 0.6526,
"step": 7240
},
{
"epoch": 1.8659117230729636,
"grad_norm": 0.8485135436058044,
"learning_rate": 3.1341055341055344e-05,
"loss": 0.6785,
"step": 7250
},
{
"epoch": 1.8684853944151332,
"grad_norm": 1.1969527006149292,
"learning_rate": 3.131531531531532e-05,
"loss": 0.7079,
"step": 7260
},
{
"epoch": 1.8710590657573027,
"grad_norm": 1.0521395206451416,
"learning_rate": 3.1289575289575294e-05,
"loss": 0.6408,
"step": 7270
},
{
"epoch": 1.8736327370994723,
"grad_norm": 1.203535556793213,
"learning_rate": 3.1263835263835265e-05,
"loss": 0.6577,
"step": 7280
},
{
"epoch": 1.876206408441642,
"grad_norm": 1.180917501449585,
"learning_rate": 3.123809523809524e-05,
"loss": 0.7297,
"step": 7290
},
{
"epoch": 1.8787800797838115,
"grad_norm": 1.5537686347961426,
"learning_rate": 3.1212355212355215e-05,
"loss": 0.7638,
"step": 7300
},
{
"epoch": 1.8813537511259812,
"grad_norm": 1.0814871788024902,
"learning_rate": 3.118661518661519e-05,
"loss": 0.6378,
"step": 7310
},
{
"epoch": 1.8839274224681508,
"grad_norm": 7.449766159057617,
"learning_rate": 3.1160875160875165e-05,
"loss": 0.7747,
"step": 7320
},
{
"epoch": 1.8865010938103204,
"grad_norm": 1.0792584419250488,
"learning_rate": 3.113513513513514e-05,
"loss": 0.7314,
"step": 7330
},
{
"epoch": 1.8890747651524902,
"grad_norm": 0.8132847547531128,
"learning_rate": 3.1109395109395115e-05,
"loss": 0.7088,
"step": 7340
},
{
"epoch": 1.8916484364946595,
"grad_norm": 0.9090826511383057,
"learning_rate": 3.108365508365508e-05,
"loss": 0.6871,
"step": 7350
},
{
"epoch": 1.8942221078368293,
"grad_norm": 0.9906513094902039,
"learning_rate": 3.105791505791506e-05,
"loss": 0.6722,
"step": 7360
},
{
"epoch": 1.8967957791789989,
"grad_norm": 1.0088332891464233,
"learning_rate": 3.103217503217503e-05,
"loss": 0.6669,
"step": 7370
},
{
"epoch": 1.8993694505211685,
"grad_norm": 3.6802520751953125,
"learning_rate": 3.100643500643501e-05,
"loss": 0.6908,
"step": 7380
},
{
"epoch": 1.901943121863338,
"grad_norm": 1.407848596572876,
"learning_rate": 3.098069498069498e-05,
"loss": 0.6203,
"step": 7390
},
{
"epoch": 1.9045167932055076,
"grad_norm": 1.0382202863693237,
"learning_rate": 3.095495495495496e-05,
"loss": 0.6618,
"step": 7400
},
{
"epoch": 1.9070904645476774,
"grad_norm": 1.39858877658844,
"learning_rate": 3.092921492921493e-05,
"loss": 0.6262,
"step": 7410
},
{
"epoch": 1.9096641358898467,
"grad_norm": 0.9530224204063416,
"learning_rate": 3.09034749034749e-05,
"loss": 0.7161,
"step": 7420
},
{
"epoch": 1.9122378072320165,
"grad_norm": 1.1237123012542725,
"learning_rate": 3.087773487773488e-05,
"loss": 0.7333,
"step": 7430
},
{
"epoch": 1.914811478574186,
"grad_norm": 2.556382417678833,
"learning_rate": 3.085199485199485e-05,
"loss": 0.6714,
"step": 7440
},
{
"epoch": 1.9173851499163557,
"grad_norm": 0.9731000065803528,
"learning_rate": 3.082625482625483e-05,
"loss": 0.6965,
"step": 7450
},
{
"epoch": 1.9199588212585252,
"grad_norm": 3.8856775760650635,
"learning_rate": 3.08005148005148e-05,
"loss": 0.6913,
"step": 7460
},
{
"epoch": 1.9225324926006948,
"grad_norm": 0.8493612408638,
"learning_rate": 3.077477477477478e-05,
"loss": 0.6683,
"step": 7470
},
{
"epoch": 1.9251061639428646,
"grad_norm": 3.1507325172424316,
"learning_rate": 3.074903474903475e-05,
"loss": 0.6804,
"step": 7480
},
{
"epoch": 1.927679835285034,
"grad_norm": 1.0596544742584229,
"learning_rate": 3.072329472329472e-05,
"loss": 0.6675,
"step": 7490
},
{
"epoch": 1.9302535066272037,
"grad_norm": 0.8844677209854126,
"learning_rate": 3.0697554697554695e-05,
"loss": 0.7052,
"step": 7500
},
{
"epoch": 1.9328271779693733,
"grad_norm": 3.017733335494995,
"learning_rate": 3.067181467181467e-05,
"loss": 0.6967,
"step": 7510
},
{
"epoch": 1.9354008493115429,
"grad_norm": 2.120694875717163,
"learning_rate": 3.0646074646074645e-05,
"loss": 0.7005,
"step": 7520
},
{
"epoch": 1.9379745206537127,
"grad_norm": 0.8085142970085144,
"learning_rate": 3.062033462033462e-05,
"loss": 0.7189,
"step": 7530
},
{
"epoch": 1.940548191995882,
"grad_norm": 0.9313369393348694,
"learning_rate": 3.0594594594594595e-05,
"loss": 0.6743,
"step": 7540
},
{
"epoch": 1.9431218633380518,
"grad_norm": 1.2053954601287842,
"learning_rate": 3.056885456885457e-05,
"loss": 0.6897,
"step": 7550
},
{
"epoch": 1.9456955346802214,
"grad_norm": 0.8504372239112854,
"learning_rate": 3.0543114543114545e-05,
"loss": 0.6518,
"step": 7560
},
{
"epoch": 1.948269206022391,
"grad_norm": 3.263662576675415,
"learning_rate": 3.051737451737452e-05,
"loss": 0.6482,
"step": 7570
},
{
"epoch": 1.9508428773645605,
"grad_norm": 0.7058959007263184,
"learning_rate": 3.049163449163449e-05,
"loss": 0.6701,
"step": 7580
},
{
"epoch": 1.95341654870673,
"grad_norm": 0.9414685964584351,
"learning_rate": 3.0465894465894466e-05,
"loss": 0.6468,
"step": 7590
},
{
"epoch": 1.9559902200488999,
"grad_norm": 0.9003808498382568,
"learning_rate": 3.0440154440154438e-05,
"loss": 0.7726,
"step": 7600
},
{
"epoch": 1.9585638913910692,
"grad_norm": 0.8774452209472656,
"learning_rate": 3.0414414414414416e-05,
"loss": 0.6717,
"step": 7610
},
{
"epoch": 1.961137562733239,
"grad_norm": 1.403334379196167,
"learning_rate": 3.0388674388674388e-05,
"loss": 0.7249,
"step": 7620
},
{
"epoch": 1.9637112340754086,
"grad_norm": 0.7909294962882996,
"learning_rate": 3.0362934362934363e-05,
"loss": 0.6854,
"step": 7630
},
{
"epoch": 1.9662849054175782,
"grad_norm": 0.72877037525177,
"learning_rate": 3.033719433719434e-05,
"loss": 0.6771,
"step": 7640
},
{
"epoch": 1.9688585767597477,
"grad_norm": 1.0888760089874268,
"learning_rate": 3.0311454311454313e-05,
"loss": 0.723,
"step": 7650
},
{
"epoch": 1.9714322481019173,
"grad_norm": 1.091776728630066,
"learning_rate": 3.0285714285714288e-05,
"loss": 0.7481,
"step": 7660
},
{
"epoch": 1.974005919444087,
"grad_norm": 1.1127774715423584,
"learning_rate": 3.025997425997426e-05,
"loss": 0.6342,
"step": 7670
},
{
"epoch": 1.9765795907862564,
"grad_norm": 1.1457735300064087,
"learning_rate": 3.0234234234234238e-05,
"loss": 0.6916,
"step": 7680
},
{
"epoch": 1.9791532621284262,
"grad_norm": 1.0062847137451172,
"learning_rate": 3.020849420849421e-05,
"loss": 0.6477,
"step": 7690
},
{
"epoch": 1.9817269334705958,
"grad_norm": 0.9393907785415649,
"learning_rate": 3.0182754182754185e-05,
"loss": 0.6596,
"step": 7700
},
{
"epoch": 1.9843006048127654,
"grad_norm": 0.8132648468017578,
"learning_rate": 3.0157014157014156e-05,
"loss": 0.7459,
"step": 7710
},
{
"epoch": 1.9868742761549352,
"grad_norm": 0.8783884644508362,
"learning_rate": 3.0131274131274135e-05,
"loss": 0.6531,
"step": 7720
},
{
"epoch": 1.9894479474971045,
"grad_norm": 0.982463538646698,
"learning_rate": 3.0105534105534106e-05,
"loss": 0.6378,
"step": 7730
},
{
"epoch": 1.9920216188392743,
"grad_norm": 2.940614700317383,
"learning_rate": 3.007979407979408e-05,
"loss": 0.6651,
"step": 7740
},
{
"epoch": 1.9945952901814439,
"grad_norm": 1.3412425518035889,
"learning_rate": 3.0054054054054053e-05,
"loss": 0.7025,
"step": 7750
},
{
"epoch": 1.9971689615236135,
"grad_norm": 4.413862228393555,
"learning_rate": 3.002831402831403e-05,
"loss": 0.7143,
"step": 7760
},
{
"epoch": 1.999742632865783,
"grad_norm": 1.016687035560608,
"learning_rate": 3.0002574002574003e-05,
"loss": 0.6285,
"step": 7770
},
{
"epoch": 2.0023163042079526,
"grad_norm": 2.836697816848755,
"learning_rate": 2.9976833976833978e-05,
"loss": 0.584,
"step": 7780
},
{
"epoch": 2.0048899755501224,
"grad_norm": 1.2601174116134644,
"learning_rate": 2.995109395109395e-05,
"loss": 0.509,
"step": 7790
},
{
"epoch": 2.0074636468922917,
"grad_norm": 0.9443026781082153,
"learning_rate": 2.9925353925353928e-05,
"loss": 0.5627,
"step": 7800
},
{
"epoch": 2.0100373182344615,
"grad_norm": 1.8750293254852295,
"learning_rate": 2.98996138996139e-05,
"loss": 0.544,
"step": 7810
},
{
"epoch": 2.012610989576631,
"grad_norm": 1.145896553993225,
"learning_rate": 2.9873873873873875e-05,
"loss": 0.5443,
"step": 7820
},
{
"epoch": 2.0151846609188007,
"grad_norm": 1.1090469360351562,
"learning_rate": 2.9848133848133846e-05,
"loss": 0.5265,
"step": 7830
},
{
"epoch": 2.0177583322609705,
"grad_norm": 0.9929465651512146,
"learning_rate": 2.9822393822393825e-05,
"loss": 0.5974,
"step": 7840
},
{
"epoch": 2.02033200360314,
"grad_norm": 0.9841685891151428,
"learning_rate": 2.97966537966538e-05,
"loss": 0.5093,
"step": 7850
},
{
"epoch": 2.0229056749453096,
"grad_norm": 0.946467399597168,
"learning_rate": 2.977091377091377e-05,
"loss": 0.4556,
"step": 7860
},
{
"epoch": 2.025479346287479,
"grad_norm": 3.8299388885498047,
"learning_rate": 2.974517374517375e-05,
"loss": 0.5295,
"step": 7870
},
{
"epoch": 2.0280530176296487,
"grad_norm": 0.9343544840812683,
"learning_rate": 2.971943371943372e-05,
"loss": 0.4939,
"step": 7880
},
{
"epoch": 2.030626688971818,
"grad_norm": 1.0280673503875732,
"learning_rate": 2.9693693693693696e-05,
"loss": 0.55,
"step": 7890
},
{
"epoch": 2.033200360313988,
"grad_norm": 1.1726477146148682,
"learning_rate": 2.9667953667953668e-05,
"loss": 0.5473,
"step": 7900
},
{
"epoch": 2.0357740316561577,
"grad_norm": 1.583446741104126,
"learning_rate": 2.9642213642213646e-05,
"loss": 0.5337,
"step": 7910
},
{
"epoch": 2.038347702998327,
"grad_norm": 1.0653187036514282,
"learning_rate": 2.9616473616473618e-05,
"loss": 0.5612,
"step": 7920
},
{
"epoch": 2.040921374340497,
"grad_norm": 1.5087721347808838,
"learning_rate": 2.9590733590733593e-05,
"loss": 0.5698,
"step": 7930
},
{
"epoch": 2.043495045682666,
"grad_norm": 2.7015230655670166,
"learning_rate": 2.9564993564993564e-05,
"loss": 0.5721,
"step": 7940
},
{
"epoch": 2.046068717024836,
"grad_norm": 2.5739264488220215,
"learning_rate": 2.9539253539253543e-05,
"loss": 0.5572,
"step": 7950
},
{
"epoch": 2.0486423883670057,
"grad_norm": 1.3727151155471802,
"learning_rate": 2.9513513513513514e-05,
"loss": 0.5949,
"step": 7960
},
{
"epoch": 2.051216059709175,
"grad_norm": 1.1039259433746338,
"learning_rate": 2.948777348777349e-05,
"loss": 0.5954,
"step": 7970
},
{
"epoch": 2.053789731051345,
"grad_norm": 3.637061834335327,
"learning_rate": 2.946203346203346e-05,
"loss": 0.6065,
"step": 7980
},
{
"epoch": 2.0563634023935142,
"grad_norm": 1.178566336631775,
"learning_rate": 2.943629343629344e-05,
"loss": 0.5253,
"step": 7990
},
{
"epoch": 2.058937073735684,
"grad_norm": 0.8954353332519531,
"learning_rate": 2.941055341055341e-05,
"loss": 0.5421,
"step": 8000
},
{
"epoch": 2.058937073735684,
"eval_loss": 0.7958057522773743,
"eval_runtime": 395.6991,
"eval_samples_per_second": 49.093,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.0003653611096349046,
"step": 8000
},
{
"epoch": 2.0615107450778534,
"grad_norm": 1.133931040763855,
"learning_rate": 2.9384813384813386e-05,
"loss": 0.5644,
"step": 8010
},
{
"epoch": 2.064084416420023,
"grad_norm": 1.2688968181610107,
"learning_rate": 2.9359073359073358e-05,
"loss": 0.5866,
"step": 8020
},
{
"epoch": 2.066658087762193,
"grad_norm": 0.861214816570282,
"learning_rate": 2.9333333333333336e-05,
"loss": 0.5449,
"step": 8030
},
{
"epoch": 2.0692317591043623,
"grad_norm": 0.9040514230728149,
"learning_rate": 2.9307593307593308e-05,
"loss": 0.5398,
"step": 8040
},
{
"epoch": 2.071805430446532,
"grad_norm": 1.010221004486084,
"learning_rate": 2.9281853281853283e-05,
"loss": 0.5111,
"step": 8050
},
{
"epoch": 2.0743791017887014,
"grad_norm": 0.7724061012268066,
"learning_rate": 2.925611325611326e-05,
"loss": 0.5378,
"step": 8060
},
{
"epoch": 2.0769527731308712,
"grad_norm": 1.2511149644851685,
"learning_rate": 2.9230373230373233e-05,
"loss": 0.5746,
"step": 8070
},
{
"epoch": 2.0795264444730406,
"grad_norm": 1.4768840074539185,
"learning_rate": 2.9204633204633208e-05,
"loss": 0.5502,
"step": 8080
},
{
"epoch": 2.0821001158152104,
"grad_norm": 1.175214171409607,
"learning_rate": 2.917889317889318e-05,
"loss": 0.5351,
"step": 8090
},
{
"epoch": 2.08467378715738,
"grad_norm": 0.9883387088775635,
"learning_rate": 2.9153153153153158e-05,
"loss": 0.5447,
"step": 8100
},
{
"epoch": 2.0872474584995495,
"grad_norm": 1.3396533727645874,
"learning_rate": 2.912998712998713e-05,
"loss": 0.641,
"step": 8110
},
{
"epoch": 2.0898211298417193,
"grad_norm": 2.0846426486968994,
"learning_rate": 2.910682110682111e-05,
"loss": 0.5135,
"step": 8120
},
{
"epoch": 2.0923948011838887,
"grad_norm": 2.5317373275756836,
"learning_rate": 2.9081081081081087e-05,
"loss": 0.5579,
"step": 8130
},
{
"epoch": 2.0949684725260584,
"grad_norm": 3.6536707878112793,
"learning_rate": 2.9055341055341055e-05,
"loss": 0.5584,
"step": 8140
},
{
"epoch": 2.0975421438682282,
"grad_norm": 0.9044039249420166,
"learning_rate": 2.9029601029601033e-05,
"loss": 0.546,
"step": 8150
},
{
"epoch": 2.1001158152103976,
"grad_norm": 1.1351265907287598,
"learning_rate": 2.9003861003861005e-05,
"loss": 0.6126,
"step": 8160
},
{
"epoch": 2.1026894865525674,
"grad_norm": 1.3066338300704956,
"learning_rate": 2.897812097812098e-05,
"loss": 0.5059,
"step": 8170
},
{
"epoch": 2.1052631578947367,
"grad_norm": 0.9377551674842834,
"learning_rate": 2.8952380952380952e-05,
"loss": 0.5245,
"step": 8180
},
{
"epoch": 2.1078368292369065,
"grad_norm": 1.2370498180389404,
"learning_rate": 2.892664092664093e-05,
"loss": 0.5056,
"step": 8190
},
{
"epoch": 2.110410500579076,
"grad_norm": 2.860896348953247,
"learning_rate": 2.8900900900900902e-05,
"loss": 0.575,
"step": 8200
},
{
"epoch": 2.1129841719212457,
"grad_norm": 1.3423675298690796,
"learning_rate": 2.8875160875160877e-05,
"loss": 0.5974,
"step": 8210
},
{
"epoch": 2.1155578432634154,
"grad_norm": 1.3402422666549683,
"learning_rate": 2.884942084942085e-05,
"loss": 0.582,
"step": 8220
},
{
"epoch": 2.118131514605585,
"grad_norm": 1.225522756576538,
"learning_rate": 2.8823680823680827e-05,
"loss": 0.5424,
"step": 8230
},
{
"epoch": 2.1207051859477546,
"grad_norm": 2.7710585594177246,
"learning_rate": 2.87979407979408e-05,
"loss": 0.5898,
"step": 8240
},
{
"epoch": 2.123278857289924,
"grad_norm": 1.2279800176620483,
"learning_rate": 2.8772200772200773e-05,
"loss": 0.5657,
"step": 8250
},
{
"epoch": 2.1258525286320937,
"grad_norm": 3.662959575653076,
"learning_rate": 2.8746460746460745e-05,
"loss": 0.4772,
"step": 8260
},
{
"epoch": 2.128426199974263,
"grad_norm": 1.2062429189682007,
"learning_rate": 2.8720720720720723e-05,
"loss": 0.5685,
"step": 8270
},
{
"epoch": 2.130999871316433,
"grad_norm": 3.6382572650909424,
"learning_rate": 2.8694980694980695e-05,
"loss": 0.5208,
"step": 8280
},
{
"epoch": 2.1335735426586027,
"grad_norm": 1.2735857963562012,
"learning_rate": 2.866924066924067e-05,
"loss": 0.6405,
"step": 8290
},
{
"epoch": 2.136147214000772,
"grad_norm": 1.1604303121566772,
"learning_rate": 2.864350064350064e-05,
"loss": 0.5217,
"step": 8300
},
{
"epoch": 2.138720885342942,
"grad_norm": 5.992644786834717,
"learning_rate": 2.861776061776062e-05,
"loss": 0.5425,
"step": 8310
},
{
"epoch": 2.141294556685111,
"grad_norm": 1.644361972808838,
"learning_rate": 2.859202059202059e-05,
"loss": 0.601,
"step": 8320
},
{
"epoch": 2.143868228027281,
"grad_norm": 1.2567894458770752,
"learning_rate": 2.8566280566280567e-05,
"loss": 0.614,
"step": 8330
},
{
"epoch": 2.1464418993694503,
"grad_norm": 1.1754887104034424,
"learning_rate": 2.8540540540540545e-05,
"loss": 0.5085,
"step": 8340
},
{
"epoch": 2.14901557071162,
"grad_norm": 0.9711121320724487,
"learning_rate": 2.8514800514800517e-05,
"loss": 0.6065,
"step": 8350
},
{
"epoch": 2.15158924205379,
"grad_norm": 0.790381669998169,
"learning_rate": 2.848906048906049e-05,
"loss": 0.5743,
"step": 8360
},
{
"epoch": 2.1541629133959592,
"grad_norm": 1.308838129043579,
"learning_rate": 2.8463320463320463e-05,
"loss": 0.5701,
"step": 8370
},
{
"epoch": 2.156736584738129,
"grad_norm": 1.1394035816192627,
"learning_rate": 2.843758043758044e-05,
"loss": 0.5446,
"step": 8380
},
{
"epoch": 2.1593102560802984,
"grad_norm": 0.9981347918510437,
"learning_rate": 2.8411840411840413e-05,
"loss": 0.5563,
"step": 8390
},
{
"epoch": 2.161883927422468,
"grad_norm": 1.1559290885925293,
"learning_rate": 2.8386100386100388e-05,
"loss": 0.554,
"step": 8400
},
{
"epoch": 2.164457598764638,
"grad_norm": 1.4176164865493774,
"learning_rate": 2.836036036036036e-05,
"loss": 0.5751,
"step": 8410
},
{
"epoch": 2.1670312701068073,
"grad_norm": 1.236528992652893,
"learning_rate": 2.8334620334620338e-05,
"loss": 0.6102,
"step": 8420
},
{
"epoch": 2.169604941448977,
"grad_norm": 1.4529129266738892,
"learning_rate": 2.830888030888031e-05,
"loss": 0.5281,
"step": 8430
},
{
"epoch": 2.1721786127911464,
"grad_norm": 2.4904539585113525,
"learning_rate": 2.8283140283140285e-05,
"loss": 0.5257,
"step": 8440
},
{
"epoch": 2.1747522841333162,
"grad_norm": 2.638392448425293,
"learning_rate": 2.8257400257400256e-05,
"loss": 0.5261,
"step": 8450
},
{
"epoch": 2.1773259554754856,
"grad_norm": 0.9878402352333069,
"learning_rate": 2.8231660231660235e-05,
"loss": 0.6199,
"step": 8460
},
{
"epoch": 2.1798996268176554,
"grad_norm": 1.2372921705245972,
"learning_rate": 2.8205920205920206e-05,
"loss": 0.5189,
"step": 8470
},
{
"epoch": 2.182473298159825,
"grad_norm": 1.3234797716140747,
"learning_rate": 2.818018018018018e-05,
"loss": 0.5475,
"step": 8480
},
{
"epoch": 2.1850469695019945,
"grad_norm": 1.4524154663085938,
"learning_rate": 2.8154440154440153e-05,
"loss": 0.5446,
"step": 8490
},
{
"epoch": 2.1876206408441643,
"grad_norm": 2.357632637023926,
"learning_rate": 2.812870012870013e-05,
"loss": 0.5848,
"step": 8500
},
{
"epoch": 2.1901943121863336,
"grad_norm": 0.8181639909744263,
"learning_rate": 2.8102960102960103e-05,
"loss": 0.4966,
"step": 8510
},
{
"epoch": 2.1927679835285034,
"grad_norm": 1.1121617555618286,
"learning_rate": 2.8077220077220078e-05,
"loss": 0.5481,
"step": 8520
},
{
"epoch": 2.1953416548706732,
"grad_norm": 1.105263113975525,
"learning_rate": 2.805148005148005e-05,
"loss": 0.5626,
"step": 8530
},
{
"epoch": 2.1979153262128426,
"grad_norm": 3.460721731185913,
"learning_rate": 2.8025740025740028e-05,
"loss": 0.55,
"step": 8540
},
{
"epoch": 2.2004889975550124,
"grad_norm": 1.0655298233032227,
"learning_rate": 2.8000000000000003e-05,
"loss": 0.555,
"step": 8550
},
{
"epoch": 2.2030626688971817,
"grad_norm": 1.5254201889038086,
"learning_rate": 2.7974259974259975e-05,
"loss": 0.5404,
"step": 8560
},
{
"epoch": 2.2056363402393515,
"grad_norm": 0.9580132961273193,
"learning_rate": 2.7948519948519953e-05,
"loss": 0.574,
"step": 8570
},
{
"epoch": 2.208210011581521,
"grad_norm": 0.87156081199646,
"learning_rate": 2.7922779922779925e-05,
"loss": 0.5618,
"step": 8580
},
{
"epoch": 2.2107836829236907,
"grad_norm": 1.130436897277832,
"learning_rate": 2.78970398970399e-05,
"loss": 0.555,
"step": 8590
},
{
"epoch": 2.2133573542658604,
"grad_norm": 1.059656023979187,
"learning_rate": 2.787129987129987e-05,
"loss": 0.6065,
"step": 8600
},
{
"epoch": 2.21593102560803,
"grad_norm": 2.916320562362671,
"learning_rate": 2.784555984555985e-05,
"loss": 0.4926,
"step": 8610
},
{
"epoch": 2.2185046969501996,
"grad_norm": 1.392271876335144,
"learning_rate": 2.781981981981982e-05,
"loss": 0.5869,
"step": 8620
},
{
"epoch": 2.221078368292369,
"grad_norm": 0.8909618258476257,
"learning_rate": 2.7794079794079796e-05,
"loss": 0.522,
"step": 8630
},
{
"epoch": 2.2236520396345387,
"grad_norm": 1.329585075378418,
"learning_rate": 2.7768339768339768e-05,
"loss": 0.6073,
"step": 8640
},
{
"epoch": 2.226225710976708,
"grad_norm": 0.8332410454750061,
"learning_rate": 2.7742599742599746e-05,
"loss": 0.5942,
"step": 8650
},
{
"epoch": 2.228799382318878,
"grad_norm": 1.180031180381775,
"learning_rate": 2.7716859716859718e-05,
"loss": 0.5056,
"step": 8660
},
{
"epoch": 2.2313730536610477,
"grad_norm": 0.7509261965751648,
"learning_rate": 2.7691119691119693e-05,
"loss": 0.5434,
"step": 8670
},
{
"epoch": 2.233946725003217,
"grad_norm": 1.0426994562149048,
"learning_rate": 2.7665379665379664e-05,
"loss": 0.5641,
"step": 8680
},
{
"epoch": 2.236520396345387,
"grad_norm": 0.997373640537262,
"learning_rate": 2.7639639639639643e-05,
"loss": 0.5676,
"step": 8690
},
{
"epoch": 2.239094067687556,
"grad_norm": 1.2469213008880615,
"learning_rate": 2.7613899613899614e-05,
"loss": 0.5751,
"step": 8700
},
{
"epoch": 2.241667739029726,
"grad_norm": 1.0776273012161255,
"learning_rate": 2.758815958815959e-05,
"loss": 0.529,
"step": 8710
},
{
"epoch": 2.2442414103718953,
"grad_norm": 1.02994966506958,
"learning_rate": 2.756241956241956e-05,
"loss": 0.5762,
"step": 8720
},
{
"epoch": 2.246815081714065,
"grad_norm": 3.1107001304626465,
"learning_rate": 2.753667953667954e-05,
"loss": 0.5354,
"step": 8730
},
{
"epoch": 2.249388753056235,
"grad_norm": 5.489597797393799,
"learning_rate": 2.751093951093951e-05,
"loss": 0.554,
"step": 8740
},
{
"epoch": 2.251962424398404,
"grad_norm": 1.1937365531921387,
"learning_rate": 2.7485199485199486e-05,
"loss": 0.5603,
"step": 8750
},
{
"epoch": 2.254536095740574,
"grad_norm": 0.7883516550064087,
"learning_rate": 2.7459459459459464e-05,
"loss": 0.5309,
"step": 8760
},
{
"epoch": 2.2571097670827434,
"grad_norm": 0.9160059094429016,
"learning_rate": 2.7433719433719436e-05,
"loss": 0.5477,
"step": 8770
},
{
"epoch": 2.259683438424913,
"grad_norm": 1.0978742837905884,
"learning_rate": 2.740797940797941e-05,
"loss": 0.5444,
"step": 8780
},
{
"epoch": 2.2622571097670825,
"grad_norm": 5.506754398345947,
"learning_rate": 2.7382239382239383e-05,
"loss": 0.5292,
"step": 8790
},
{
"epoch": 2.2648307811092523,
"grad_norm": 1.0431208610534668,
"learning_rate": 2.735649935649936e-05,
"loss": 0.522,
"step": 8800
},
{
"epoch": 2.267404452451422,
"grad_norm": 1.238174319267273,
"learning_rate": 2.7330759330759333e-05,
"loss": 0.5596,
"step": 8810
},
{
"epoch": 2.2699781237935914,
"grad_norm": 3.145578384399414,
"learning_rate": 2.7305019305019308e-05,
"loss": 0.5143,
"step": 8820
},
{
"epoch": 2.272551795135761,
"grad_norm": 0.794948399066925,
"learning_rate": 2.727927927927928e-05,
"loss": 0.5643,
"step": 8830
},
{
"epoch": 2.275125466477931,
"grad_norm": 0.9516599774360657,
"learning_rate": 2.7253539253539258e-05,
"loss": 0.55,
"step": 8840
},
{
"epoch": 2.2776991378201004,
"grad_norm": 1.01409912109375,
"learning_rate": 2.722779922779923e-05,
"loss": 0.4978,
"step": 8850
},
{
"epoch": 2.28027280916227,
"grad_norm": 2.1791932582855225,
"learning_rate": 2.7202059202059204e-05,
"loss": 0.568,
"step": 8860
},
{
"epoch": 2.2828464805044395,
"grad_norm": 0.9807432889938354,
"learning_rate": 2.7176319176319176e-05,
"loss": 0.6153,
"step": 8870
},
{
"epoch": 2.2854201518466093,
"grad_norm": 0.947486400604248,
"learning_rate": 2.7150579150579154e-05,
"loss": 0.5095,
"step": 8880
},
{
"epoch": 2.2879938231887786,
"grad_norm": 1.0243611335754395,
"learning_rate": 2.7124839124839126e-05,
"loss": 0.5296,
"step": 8890
},
{
"epoch": 2.2905674945309484,
"grad_norm": 0.9641122817993164,
"learning_rate": 2.70990990990991e-05,
"loss": 0.5597,
"step": 8900
},
{
"epoch": 2.2931411658731182,
"grad_norm": 0.9149156212806702,
"learning_rate": 2.7073359073359072e-05,
"loss": 0.5361,
"step": 8910
},
{
"epoch": 2.2957148372152876,
"grad_norm": 1.1571760177612305,
"learning_rate": 2.704761904761905e-05,
"loss": 0.5116,
"step": 8920
},
{
"epoch": 2.2982885085574574,
"grad_norm": 1.1743327379226685,
"learning_rate": 2.7021879021879022e-05,
"loss": 0.5913,
"step": 8930
},
{
"epoch": 2.3008621798996267,
"grad_norm": 0.9054014682769775,
"learning_rate": 2.6996138996138997e-05,
"loss": 0.5819,
"step": 8940
},
{
"epoch": 2.3034358512417965,
"grad_norm": 1.0355494022369385,
"learning_rate": 2.697039897039897e-05,
"loss": 0.5633,
"step": 8950
},
{
"epoch": 2.306009522583966,
"grad_norm": 2.785370111465454,
"learning_rate": 2.6944658944658947e-05,
"loss": 0.5018,
"step": 8960
},
{
"epoch": 2.3085831939261356,
"grad_norm": 1.2618205547332764,
"learning_rate": 2.6918918918918922e-05,
"loss": 0.5516,
"step": 8970
},
{
"epoch": 2.3111568652683054,
"grad_norm": 1.069515347480774,
"learning_rate": 2.6893178893178894e-05,
"loss": 0.5578,
"step": 8980
},
{
"epoch": 2.313730536610475,
"grad_norm": 2.962970018386841,
"learning_rate": 2.6867438867438872e-05,
"loss": 0.565,
"step": 8990
},
{
"epoch": 2.3163042079526446,
"grad_norm": 1.353009581565857,
"learning_rate": 2.6841698841698844e-05,
"loss": 0.507,
"step": 9000
},
{
"epoch": 2.3163042079526446,
"eval_loss": 0.7905660271644592,
"eval_runtime": 395.6161,
"eval_samples_per_second": 49.103,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00036479171569781123,
"step": 9000
},
{
"epoch": 2.318877879294814,
"grad_norm": 1.2040934562683105,
"learning_rate": 2.681595881595882e-05,
"loss": 0.5755,
"step": 9010
},
{
"epoch": 2.3214515506369837,
"grad_norm": 0.8578842282295227,
"learning_rate": 2.679021879021879e-05,
"loss": 0.5335,
"step": 9020
},
{
"epoch": 2.324025221979153,
"grad_norm": 0.989274799823761,
"learning_rate": 2.676447876447877e-05,
"loss": 0.5647,
"step": 9030
},
{
"epoch": 2.326598893321323,
"grad_norm": 1.0703078508377075,
"learning_rate": 2.6738738738738737e-05,
"loss": 0.5929,
"step": 9040
},
{
"epoch": 2.3291725646634927,
"grad_norm": 0.8797844052314758,
"learning_rate": 2.6712998712998716e-05,
"loss": 0.548,
"step": 9050
},
{
"epoch": 2.331746236005662,
"grad_norm": 1.1639105081558228,
"learning_rate": 2.6687258687258687e-05,
"loss": 0.5716,
"step": 9060
},
{
"epoch": 2.334319907347832,
"grad_norm": 1.3019888401031494,
"learning_rate": 2.6661518661518666e-05,
"loss": 0.5555,
"step": 9070
},
{
"epoch": 2.336893578690001,
"grad_norm": 4.5147271156311035,
"learning_rate": 2.6635778635778634e-05,
"loss": 0.6358,
"step": 9080
},
{
"epoch": 2.339467250032171,
"grad_norm": 1.306016206741333,
"learning_rate": 2.6610038610038612e-05,
"loss": 0.5318,
"step": 9090
},
{
"epoch": 2.3420409213743403,
"grad_norm": 1.211195468902588,
"learning_rate": 2.6584298584298584e-05,
"loss": 0.5253,
"step": 9100
},
{
"epoch": 2.34461459271651,
"grad_norm": 1.1488116979599,
"learning_rate": 2.655855855855856e-05,
"loss": 0.6381,
"step": 9110
},
{
"epoch": 2.34718826405868,
"grad_norm": 1.1285436153411865,
"learning_rate": 2.653281853281853e-05,
"loss": 0.5281,
"step": 9120
},
{
"epoch": 2.349761935400849,
"grad_norm": 1.651699423789978,
"learning_rate": 2.650707850707851e-05,
"loss": 0.5664,
"step": 9130
},
{
"epoch": 2.352335606743019,
"grad_norm": 1.1540205478668213,
"learning_rate": 2.648133848133848e-05,
"loss": 0.5808,
"step": 9140
},
{
"epoch": 2.3549092780851884,
"grad_norm": 1.0008643865585327,
"learning_rate": 2.6455598455598455e-05,
"loss": 0.5613,
"step": 9150
},
{
"epoch": 2.357482949427358,
"grad_norm": 0.9590166211128235,
"learning_rate": 2.6429858429858427e-05,
"loss": 0.6062,
"step": 9160
},
{
"epoch": 2.3600566207695275,
"grad_norm": 0.9757869243621826,
"learning_rate": 2.6404118404118405e-05,
"loss": 0.5632,
"step": 9170
},
{
"epoch": 2.3626302921116973,
"grad_norm": 0.9153413772583008,
"learning_rate": 2.6378378378378384e-05,
"loss": 0.5501,
"step": 9180
},
{
"epoch": 2.365203963453867,
"grad_norm": 1.4172649383544922,
"learning_rate": 2.6352638352638352e-05,
"loss": 0.5803,
"step": 9190
},
{
"epoch": 2.3677776347960364,
"grad_norm": 1.612728238105774,
"learning_rate": 2.632689832689833e-05,
"loss": 0.6375,
"step": 9200
},
{
"epoch": 2.370351306138206,
"grad_norm": 1.0975571870803833,
"learning_rate": 2.6301158301158302e-05,
"loss": 0.5791,
"step": 9210
},
{
"epoch": 2.3729249774803756,
"grad_norm": 3.388500452041626,
"learning_rate": 2.6275418275418277e-05,
"loss": 0.5247,
"step": 9220
},
{
"epoch": 2.3754986488225454,
"grad_norm": 0.9185710549354553,
"learning_rate": 2.624967824967825e-05,
"loss": 0.508,
"step": 9230
},
{
"epoch": 2.378072320164715,
"grad_norm": 1.2001655101776123,
"learning_rate": 2.6223938223938227e-05,
"loss": 0.5771,
"step": 9240
},
{
"epoch": 2.3806459915068845,
"grad_norm": 1.817859411239624,
"learning_rate": 2.61981981981982e-05,
"loss": 0.5443,
"step": 9250
},
{
"epoch": 2.3832196628490543,
"grad_norm": 1.0994833707809448,
"learning_rate": 2.6172458172458174e-05,
"loss": 0.5538,
"step": 9260
},
{
"epoch": 2.3857933341912236,
"grad_norm": 1.3039990663528442,
"learning_rate": 2.6146718146718145e-05,
"loss": 0.5196,
"step": 9270
},
{
"epoch": 2.3883670055333934,
"grad_norm": 2.651137113571167,
"learning_rate": 2.6120978120978124e-05,
"loss": 0.5538,
"step": 9280
},
{
"epoch": 2.390940676875563,
"grad_norm": 1.00489342212677,
"learning_rate": 2.6095238095238095e-05,
"loss": 0.5978,
"step": 9290
},
{
"epoch": 2.3935143482177326,
"grad_norm": 2.647886276245117,
"learning_rate": 2.606949806949807e-05,
"loss": 0.5674,
"step": 9300
},
{
"epoch": 2.3960880195599024,
"grad_norm": 1.094959020614624,
"learning_rate": 2.6043758043758042e-05,
"loss": 0.5768,
"step": 9310
},
{
"epoch": 2.3986616909020717,
"grad_norm": 3.2877941131591797,
"learning_rate": 2.601801801801802e-05,
"loss": 0.5772,
"step": 9320
},
{
"epoch": 2.4012353622442415,
"grad_norm": 0.7123093008995056,
"learning_rate": 2.5992277992277992e-05,
"loss": 0.5598,
"step": 9330
},
{
"epoch": 2.403809033586411,
"grad_norm": 0.902870237827301,
"learning_rate": 2.5966537966537967e-05,
"loss": 0.5089,
"step": 9340
},
{
"epoch": 2.4063827049285806,
"grad_norm": 1.3696403503417969,
"learning_rate": 2.594079794079794e-05,
"loss": 0.5726,
"step": 9350
},
{
"epoch": 2.4089563762707504,
"grad_norm": 3.460519790649414,
"learning_rate": 2.5915057915057917e-05,
"loss": 0.5474,
"step": 9360
},
{
"epoch": 2.41153004761292,
"grad_norm": 1.307590365409851,
"learning_rate": 2.588931788931789e-05,
"loss": 0.6438,
"step": 9370
},
{
"epoch": 2.4141037189550896,
"grad_norm": 3.9630610942840576,
"learning_rate": 2.5863577863577864e-05,
"loss": 0.5163,
"step": 9380
},
{
"epoch": 2.416677390297259,
"grad_norm": 1.028784990310669,
"learning_rate": 2.5837837837837842e-05,
"loss": 0.5345,
"step": 9390
},
{
"epoch": 2.4192510616394287,
"grad_norm": 0.9117390513420105,
"learning_rate": 2.5812097812097814e-05,
"loss": 0.5966,
"step": 9400
},
{
"epoch": 2.421824732981598,
"grad_norm": 3.028024911880493,
"learning_rate": 2.578635778635779e-05,
"loss": 0.5833,
"step": 9410
},
{
"epoch": 2.424398404323768,
"grad_norm": 1.1407766342163086,
"learning_rate": 2.576061776061776e-05,
"loss": 0.5501,
"step": 9420
},
{
"epoch": 2.4269720756659376,
"grad_norm": 1.0818943977355957,
"learning_rate": 2.573487773487774e-05,
"loss": 0.5323,
"step": 9430
},
{
"epoch": 2.429545747008107,
"grad_norm": 1.1713372468948364,
"learning_rate": 2.570913770913771e-05,
"loss": 0.6247,
"step": 9440
},
{
"epoch": 2.432119418350277,
"grad_norm": 1.3347368240356445,
"learning_rate": 2.5683397683397685e-05,
"loss": 0.6161,
"step": 9450
},
{
"epoch": 2.434693089692446,
"grad_norm": 1.3403719663619995,
"learning_rate": 2.5657657657657657e-05,
"loss": 0.543,
"step": 9460
},
{
"epoch": 2.437266761034616,
"grad_norm": 1.3469754457473755,
"learning_rate": 2.5631917631917635e-05,
"loss": 0.5616,
"step": 9470
},
{
"epoch": 2.4398404323767853,
"grad_norm": 1.171751856803894,
"learning_rate": 2.5606177606177607e-05,
"loss": 0.5982,
"step": 9480
},
{
"epoch": 2.442414103718955,
"grad_norm": 2.609652042388916,
"learning_rate": 2.5580437580437582e-05,
"loss": 0.5782,
"step": 9490
},
{
"epoch": 2.444987775061125,
"grad_norm": 1.0340471267700195,
"learning_rate": 2.5554697554697553e-05,
"loss": 0.5698,
"step": 9500
},
{
"epoch": 2.447561446403294,
"grad_norm": 0.9706631302833557,
"learning_rate": 2.5528957528957532e-05,
"loss": 0.5886,
"step": 9510
},
{
"epoch": 2.450135117745464,
"grad_norm": 1.287369966506958,
"learning_rate": 2.5503217503217503e-05,
"loss": 0.5721,
"step": 9520
},
{
"epoch": 2.4527087890876333,
"grad_norm": 0.9612070918083191,
"learning_rate": 2.547747747747748e-05,
"loss": 0.6088,
"step": 9530
},
{
"epoch": 2.455282460429803,
"grad_norm": 2.4824914932250977,
"learning_rate": 2.545173745173745e-05,
"loss": 0.5057,
"step": 9540
},
{
"epoch": 2.4578561317719725,
"grad_norm": 2.4112958908081055,
"learning_rate": 2.542599742599743e-05,
"loss": 0.5689,
"step": 9550
},
{
"epoch": 2.4604298031141423,
"grad_norm": 0.7653863430023193,
"learning_rate": 2.54002574002574e-05,
"loss": 0.5119,
"step": 9560
},
{
"epoch": 2.463003474456312,
"grad_norm": 1.1979918479919434,
"learning_rate": 2.5374517374517375e-05,
"loss": 0.5162,
"step": 9570
},
{
"epoch": 2.4655771457984814,
"grad_norm": 1.1979789733886719,
"learning_rate": 2.5348777348777347e-05,
"loss": 0.5627,
"step": 9580
},
{
"epoch": 2.468150817140651,
"grad_norm": 2.0756313800811768,
"learning_rate": 2.5323037323037325e-05,
"loss": 0.5502,
"step": 9590
},
{
"epoch": 2.4707244884828206,
"grad_norm": 3.4498813152313232,
"learning_rate": 2.52972972972973e-05,
"loss": 0.5584,
"step": 9600
},
{
"epoch": 2.4732981598249903,
"grad_norm": 1.145731806755066,
"learning_rate": 2.527155727155727e-05,
"loss": 0.5082,
"step": 9610
},
{
"epoch": 2.47587183116716,
"grad_norm": 1.515875220298767,
"learning_rate": 2.524581724581725e-05,
"loss": 0.5051,
"step": 9620
},
{
"epoch": 2.4784455025093295,
"grad_norm": 0.8977373838424683,
"learning_rate": 2.522007722007722e-05,
"loss": 0.6196,
"step": 9630
},
{
"epoch": 2.4810191738514993,
"grad_norm": 1.2553268671035767,
"learning_rate": 2.5194337194337197e-05,
"loss": 0.5503,
"step": 9640
},
{
"epoch": 2.4835928451936686,
"grad_norm": 0.8410692811012268,
"learning_rate": 2.5168597168597168e-05,
"loss": 0.5605,
"step": 9650
},
{
"epoch": 2.4861665165358384,
"grad_norm": 1.2535247802734375,
"learning_rate": 2.5142857142857147e-05,
"loss": 0.5164,
"step": 9660
},
{
"epoch": 2.488740187878008,
"grad_norm": 1.1014875173568726,
"learning_rate": 2.5117117117117118e-05,
"loss": 0.5934,
"step": 9670
},
{
"epoch": 2.4913138592201776,
"grad_norm": 1.0416326522827148,
"learning_rate": 2.5091377091377093e-05,
"loss": 0.6187,
"step": 9680
},
{
"epoch": 2.4938875305623474,
"grad_norm": 1.3807120323181152,
"learning_rate": 2.5065637065637065e-05,
"loss": 0.5462,
"step": 9690
},
{
"epoch": 2.4964612019045167,
"grad_norm": 1.4746110439300537,
"learning_rate": 2.5039897039897043e-05,
"loss": 0.5576,
"step": 9700
},
{
"epoch": 2.4990348732466865,
"grad_norm": 1.372180461883545,
"learning_rate": 2.5014157014157015e-05,
"loss": 0.5218,
"step": 9710
},
{
"epoch": 2.501608544588856,
"grad_norm": 0.8345748782157898,
"learning_rate": 2.498841698841699e-05,
"loss": 0.5393,
"step": 9720
},
{
"epoch": 2.5041822159310256,
"grad_norm": 1.1809195280075073,
"learning_rate": 2.4962676962676965e-05,
"loss": 0.5577,
"step": 9730
},
{
"epoch": 2.5067558872731954,
"grad_norm": 1.1558030843734741,
"learning_rate": 2.493693693693694e-05,
"loss": 0.6091,
"step": 9740
},
{
"epoch": 2.5093295586153648,
"grad_norm": 0.8379091620445251,
"learning_rate": 2.491119691119691e-05,
"loss": 0.5787,
"step": 9750
},
{
"epoch": 2.5119032299575346,
"grad_norm": 1.2100491523742676,
"learning_rate": 2.4885456885456886e-05,
"loss": 0.5179,
"step": 9760
},
{
"epoch": 2.514476901299704,
"grad_norm": 2.15959095954895,
"learning_rate": 2.485971685971686e-05,
"loss": 0.5448,
"step": 9770
},
{
"epoch": 2.5170505726418737,
"grad_norm": 1.1132477521896362,
"learning_rate": 2.4833976833976836e-05,
"loss": 0.5354,
"step": 9780
},
{
"epoch": 2.519624243984043,
"grad_norm": 1.1005936861038208,
"learning_rate": 2.4808236808236808e-05,
"loss": 0.6376,
"step": 9790
},
{
"epoch": 2.522197915326213,
"grad_norm": 1.0520378351211548,
"learning_rate": 2.4782496782496783e-05,
"loss": 0.5468,
"step": 9800
},
{
"epoch": 2.5247715866683826,
"grad_norm": 3.0992591381073,
"learning_rate": 2.4756756756756758e-05,
"loss": 0.6128,
"step": 9810
},
{
"epoch": 2.527345258010552,
"grad_norm": 0.9129619002342224,
"learning_rate": 2.4731016731016733e-05,
"loss": 0.5671,
"step": 9820
},
{
"epoch": 2.529918929352722,
"grad_norm": 4.400023460388184,
"learning_rate": 2.4705276705276705e-05,
"loss": 0.5863,
"step": 9830
},
{
"epoch": 2.532492600694891,
"grad_norm": 1.1237571239471436,
"learning_rate": 2.467953667953668e-05,
"loss": 0.5115,
"step": 9840
},
{
"epoch": 2.535066272037061,
"grad_norm": 1.2072199583053589,
"learning_rate": 2.4653796653796655e-05,
"loss": 0.5748,
"step": 9850
},
{
"epoch": 2.5376399433792303,
"grad_norm": 0.8158368468284607,
"learning_rate": 2.462805662805663e-05,
"loss": 0.6466,
"step": 9860
},
{
"epoch": 2.5402136147214,
"grad_norm": 1.3030297756195068,
"learning_rate": 2.46023166023166e-05,
"loss": 0.5114,
"step": 9870
},
{
"epoch": 2.54278728606357,
"grad_norm": 1.1183769702911377,
"learning_rate": 2.4576576576576576e-05,
"loss": 0.5518,
"step": 9880
},
{
"epoch": 2.545360957405739,
"grad_norm": 0.9065691828727722,
"learning_rate": 2.455083655083655e-05,
"loss": 0.522,
"step": 9890
},
{
"epoch": 2.547934628747909,
"grad_norm": 1.4766273498535156,
"learning_rate": 2.4525096525096526e-05,
"loss": 0.587,
"step": 9900
},
{
"epoch": 2.5505083000900783,
"grad_norm": 4.340095043182373,
"learning_rate": 2.44993564993565e-05,
"loss": 0.6021,
"step": 9910
},
{
"epoch": 2.553081971432248,
"grad_norm": 0.8867147564888,
"learning_rate": 2.4473616473616476e-05,
"loss": 0.5076,
"step": 9920
},
{
"epoch": 2.5556556427744175,
"grad_norm": 1.2805358171463013,
"learning_rate": 2.444787644787645e-05,
"loss": 0.5817,
"step": 9930
},
{
"epoch": 2.5582293141165873,
"grad_norm": 1.0419846773147583,
"learning_rate": 2.4422136422136423e-05,
"loss": 0.5846,
"step": 9940
},
{
"epoch": 2.560802985458757,
"grad_norm": 1.3988200426101685,
"learning_rate": 2.4396396396396398e-05,
"loss": 0.5483,
"step": 9950
},
{
"epoch": 2.5633766568009264,
"grad_norm": 1.3084689378738403,
"learning_rate": 2.4370656370656373e-05,
"loss": 0.5685,
"step": 9960
},
{
"epoch": 2.565950328143096,
"grad_norm": 1.160992980003357,
"learning_rate": 2.4344916344916348e-05,
"loss": 0.5852,
"step": 9970
},
{
"epoch": 2.568523999485266,
"grad_norm": 1.859298825263977,
"learning_rate": 2.431917631917632e-05,
"loss": 0.5561,
"step": 9980
},
{
"epoch": 2.5710976708274353,
"grad_norm": 0.9575828313827515,
"learning_rate": 2.4293436293436294e-05,
"loss": 0.5501,
"step": 9990
},
{
"epoch": 2.5736713421696047,
"grad_norm": 1.3349659442901611,
"learning_rate": 2.426769626769627e-05,
"loss": 0.5458,
"step": 10000
},
{
"epoch": 2.5736713421696047,
"eval_loss": 0.7813047170639038,
"eval_runtime": 395.5545,
"eval_samples_per_second": 49.111,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.0003636529278236245,
"step": 10000
},
{
"epoch": 2.5762450135117745,
"grad_norm": 0.8265610933303833,
"learning_rate": 2.4241956241956244e-05,
"loss": 0.5199,
"step": 10010
},
{
"epoch": 2.5788186848539443,
"grad_norm": 1.1226060390472412,
"learning_rate": 2.4216216216216216e-05,
"loss": 0.5416,
"step": 10020
},
{
"epoch": 2.5813923561961136,
"grad_norm": 1.006882905960083,
"learning_rate": 2.419047619047619e-05,
"loss": 0.6422,
"step": 10030
},
{
"epoch": 2.5839660275382834,
"grad_norm": 1.1571662425994873,
"learning_rate": 2.4164736164736166e-05,
"loss": 0.5548,
"step": 10040
},
{
"epoch": 2.586539698880453,
"grad_norm": 0.9538158774375916,
"learning_rate": 2.413899613899614e-05,
"loss": 0.5416,
"step": 10050
},
{
"epoch": 2.5891133702226226,
"grad_norm": 3.1586387157440186,
"learning_rate": 2.4113256113256113e-05,
"loss": 0.5445,
"step": 10060
},
{
"epoch": 2.591687041564792,
"grad_norm": 0.9174941778182983,
"learning_rate": 2.4087516087516088e-05,
"loss": 0.5539,
"step": 10070
},
{
"epoch": 2.5942607129069617,
"grad_norm": 0.8916310667991638,
"learning_rate": 2.4061776061776063e-05,
"loss": 0.5487,
"step": 10080
},
{
"epoch": 2.5968343842491315,
"grad_norm": 3.3344202041625977,
"learning_rate": 2.4036036036036034e-05,
"loss": 0.5935,
"step": 10090
},
{
"epoch": 2.599408055591301,
"grad_norm": 1.8310283422470093,
"learning_rate": 2.4010296010296013e-05,
"loss": 0.5507,
"step": 10100
},
{
"epoch": 2.6019817269334706,
"grad_norm": 0.95711749792099,
"learning_rate": 2.3984555984555988e-05,
"loss": 0.5899,
"step": 10110
},
{
"epoch": 2.6045553982756404,
"grad_norm": 1.1293368339538574,
"learning_rate": 2.3958815958815963e-05,
"loss": 0.5468,
"step": 10120
},
{
"epoch": 2.6071290696178098,
"grad_norm": 0.8633250594139099,
"learning_rate": 2.3933075933075934e-05,
"loss": 0.5763,
"step": 10130
},
{
"epoch": 2.6097027409599796,
"grad_norm": 1.343990445137024,
"learning_rate": 2.390733590733591e-05,
"loss": 0.5839,
"step": 10140
},
{
"epoch": 2.612276412302149,
"grad_norm": 1.5311845541000366,
"learning_rate": 2.3881595881595884e-05,
"loss": 0.531,
"step": 10150
},
{
"epoch": 2.6148500836443187,
"grad_norm": 1.0829716920852661,
"learning_rate": 2.385585585585586e-05,
"loss": 0.5402,
"step": 10160
},
{
"epoch": 2.617423754986488,
"grad_norm": 1.5562556982040405,
"learning_rate": 2.383011583011583e-05,
"loss": 0.5652,
"step": 10170
},
{
"epoch": 2.619997426328658,
"grad_norm": 1.0857690572738647,
"learning_rate": 2.3804375804375806e-05,
"loss": 0.6161,
"step": 10180
},
{
"epoch": 2.6225710976708276,
"grad_norm": 1.1553452014923096,
"learning_rate": 2.377863577863578e-05,
"loss": 0.5975,
"step": 10190
},
{
"epoch": 2.625144769012997,
"grad_norm": 0.9992988109588623,
"learning_rate": 2.3752895752895752e-05,
"loss": 0.5837,
"step": 10200
},
{
"epoch": 2.6277184403551668,
"grad_norm": 1.0627033710479736,
"learning_rate": 2.3727155727155727e-05,
"loss": 0.5527,
"step": 10210
},
{
"epoch": 2.630292111697336,
"grad_norm": 0.8239421248435974,
"learning_rate": 2.3701415701415702e-05,
"loss": 0.5822,
"step": 10220
},
{
"epoch": 2.632865783039506,
"grad_norm": 2.0370047092437744,
"learning_rate": 2.3675675675675677e-05,
"loss": 0.55,
"step": 10230
},
{
"epoch": 2.6354394543816753,
"grad_norm": 1.0203136205673218,
"learning_rate": 2.364993564993565e-05,
"loss": 0.5289,
"step": 10240
},
{
"epoch": 2.638013125723845,
"grad_norm": 1.1402500867843628,
"learning_rate": 2.3624195624195624e-05,
"loss": 0.5492,
"step": 10250
},
{
"epoch": 2.640586797066015,
"grad_norm": 1.1327241659164429,
"learning_rate": 2.35984555984556e-05,
"loss": 0.5416,
"step": 10260
},
{
"epoch": 2.643160468408184,
"grad_norm": 0.9209975004196167,
"learning_rate": 2.3572715572715574e-05,
"loss": 0.5442,
"step": 10270
},
{
"epoch": 2.645734139750354,
"grad_norm": 1.3790748119354248,
"learning_rate": 2.3546975546975546e-05,
"loss": 0.6103,
"step": 10280
},
{
"epoch": 2.6483078110925233,
"grad_norm": 1.2023276090621948,
"learning_rate": 2.352123552123552e-05,
"loss": 0.5886,
"step": 10290
},
{
"epoch": 2.650881482434693,
"grad_norm": 1.0166372060775757,
"learning_rate": 2.3495495495495496e-05,
"loss": 0.5741,
"step": 10300
},
{
"epoch": 2.6534551537768625,
"grad_norm": 1.1085914373397827,
"learning_rate": 2.346975546975547e-05,
"loss": 0.5302,
"step": 10310
},
{
"epoch": 2.6560288251190323,
"grad_norm": 1.1305118799209595,
"learning_rate": 2.3444015444015446e-05,
"loss": 0.5851,
"step": 10320
},
{
"epoch": 2.658602496461202,
"grad_norm": 4.382242202758789,
"learning_rate": 2.341827541827542e-05,
"loss": 0.6207,
"step": 10330
},
{
"epoch": 2.6611761678033714,
"grad_norm": 1.3495676517486572,
"learning_rate": 2.3392535392535396e-05,
"loss": 0.531,
"step": 10340
},
{
"epoch": 2.663749839145541,
"grad_norm": 1.2372089624404907,
"learning_rate": 2.3366795366795367e-05,
"loss": 0.5543,
"step": 10350
},
{
"epoch": 2.666323510487711,
"grad_norm": 1.2781168222427368,
"learning_rate": 2.3341055341055342e-05,
"loss": 0.5379,
"step": 10360
},
{
"epoch": 2.6688971818298803,
"grad_norm": 0.9602215886116028,
"learning_rate": 2.3315315315315317e-05,
"loss": 0.5677,
"step": 10370
},
{
"epoch": 2.6714708531720497,
"grad_norm": 1.4778882265090942,
"learning_rate": 2.3289575289575292e-05,
"loss": 0.5196,
"step": 10380
},
{
"epoch": 2.6740445245142195,
"grad_norm": 1.1406737565994263,
"learning_rate": 2.3263835263835264e-05,
"loss": 0.5886,
"step": 10390
},
{
"epoch": 2.6766181958563893,
"grad_norm": 1.3045039176940918,
"learning_rate": 2.323809523809524e-05,
"loss": 0.5468,
"step": 10400
},
{
"epoch": 2.6791918671985586,
"grad_norm": 1.1120688915252686,
"learning_rate": 2.3212355212355214e-05,
"loss": 0.6246,
"step": 10410
},
{
"epoch": 2.6817655385407284,
"grad_norm": 3.4695723056793213,
"learning_rate": 2.318661518661519e-05,
"loss": 0.5557,
"step": 10420
},
{
"epoch": 2.684339209882898,
"grad_norm": 1.4263606071472168,
"learning_rate": 2.316087516087516e-05,
"loss": 0.5656,
"step": 10430
},
{
"epoch": 2.6869128812250676,
"grad_norm": 0.7746553421020508,
"learning_rate": 2.3135135135135136e-05,
"loss": 0.5458,
"step": 10440
},
{
"epoch": 2.689486552567237,
"grad_norm": 1.4084376096725464,
"learning_rate": 2.310939510939511e-05,
"loss": 0.5506,
"step": 10450
},
{
"epoch": 2.6920602239094067,
"grad_norm": 0.9711858034133911,
"learning_rate": 2.3083655083655086e-05,
"loss": 0.5219,
"step": 10460
},
{
"epoch": 2.6946338952515765,
"grad_norm": 1.1122053861618042,
"learning_rate": 2.3057915057915057e-05,
"loss": 0.5287,
"step": 10470
},
{
"epoch": 2.697207566593746,
"grad_norm": 1.0087629556655884,
"learning_rate": 2.3032175032175032e-05,
"loss": 0.6167,
"step": 10480
},
{
"epoch": 2.6997812379359156,
"grad_norm": 1.0684638023376465,
"learning_rate": 2.3009009009009013e-05,
"loss": 0.5191,
"step": 10490
},
{
"epoch": 2.7023549092780854,
"grad_norm": 1.1910923719406128,
"learning_rate": 2.2983268983268984e-05,
"loss": 0.551,
"step": 10500
},
{
"epoch": 2.7049285806202548,
"grad_norm": 1.287802815437317,
"learning_rate": 2.295752895752896e-05,
"loss": 0.5438,
"step": 10510
},
{
"epoch": 2.7075022519624246,
"grad_norm": 0.9688395857810974,
"learning_rate": 2.2931788931788934e-05,
"loss": 0.5567,
"step": 10520
},
{
"epoch": 2.710075923304594,
"grad_norm": 1.0128886699676514,
"learning_rate": 2.2906048906048906e-05,
"loss": 0.6127,
"step": 10530
},
{
"epoch": 2.7126495946467637,
"grad_norm": 1.974613070487976,
"learning_rate": 2.288030888030888e-05,
"loss": 0.5642,
"step": 10540
},
{
"epoch": 2.715223265988933,
"grad_norm": 2.323258876800537,
"learning_rate": 2.2854568854568856e-05,
"loss": 0.5192,
"step": 10550
},
{
"epoch": 2.717796937331103,
"grad_norm": 2.6239235401153564,
"learning_rate": 2.282882882882883e-05,
"loss": 0.5786,
"step": 10560
},
{
"epoch": 2.7203706086732726,
"grad_norm": 2.535557508468628,
"learning_rate": 2.2803088803088802e-05,
"loss": 0.5732,
"step": 10570
},
{
"epoch": 2.722944280015442,
"grad_norm": 1.4049146175384521,
"learning_rate": 2.2777348777348777e-05,
"loss": 0.5613,
"step": 10580
},
{
"epoch": 2.7255179513576118,
"grad_norm": 0.9087219834327698,
"learning_rate": 2.2751608751608752e-05,
"loss": 0.5429,
"step": 10590
},
{
"epoch": 2.728091622699781,
"grad_norm": 1.2348556518554688,
"learning_rate": 2.2725868725868727e-05,
"loss": 0.6106,
"step": 10600
},
{
"epoch": 2.730665294041951,
"grad_norm": 0.954634964466095,
"learning_rate": 2.27001287001287e-05,
"loss": 0.5648,
"step": 10610
},
{
"epoch": 2.7332389653841203,
"grad_norm": 0.9417296051979065,
"learning_rate": 2.2674388674388674e-05,
"loss": 0.5641,
"step": 10620
},
{
"epoch": 2.73581263672629,
"grad_norm": 3.07540225982666,
"learning_rate": 2.264864864864865e-05,
"loss": 0.6197,
"step": 10630
},
{
"epoch": 2.73838630806846,
"grad_norm": 1.1706743240356445,
"learning_rate": 2.2622908622908624e-05,
"loss": 0.5237,
"step": 10640
},
{
"epoch": 2.740959979410629,
"grad_norm": 1.0390594005584717,
"learning_rate": 2.2597168597168596e-05,
"loss": 0.5377,
"step": 10650
},
{
"epoch": 2.743533650752799,
"grad_norm": 4.094557762145996,
"learning_rate": 2.257142857142857e-05,
"loss": 0.5717,
"step": 10660
},
{
"epoch": 2.7461073220949683,
"grad_norm": 3.71659255027771,
"learning_rate": 2.254568854568855e-05,
"loss": 0.5725,
"step": 10670
},
{
"epoch": 2.748680993437138,
"grad_norm": 1.1988261938095093,
"learning_rate": 2.251994851994852e-05,
"loss": 0.565,
"step": 10680
},
{
"epoch": 2.7512546647793075,
"grad_norm": 1.5038524866104126,
"learning_rate": 2.2494208494208496e-05,
"loss": 0.5142,
"step": 10690
},
{
"epoch": 2.7538283361214773,
"grad_norm": 1.0578770637512207,
"learning_rate": 2.246846846846847e-05,
"loss": 0.5922,
"step": 10700
},
{
"epoch": 2.756402007463647,
"grad_norm": 1.151419997215271,
"learning_rate": 2.2442728442728446e-05,
"loss": 0.6072,
"step": 10710
},
{
"epoch": 2.7589756788058164,
"grad_norm": 1.1115498542785645,
"learning_rate": 2.2416988416988417e-05,
"loss": 0.5549,
"step": 10720
},
{
"epoch": 2.761549350147986,
"grad_norm": 2.30755352973938,
"learning_rate": 2.2391248391248392e-05,
"loss": 0.5863,
"step": 10730
},
{
"epoch": 2.7641230214901555,
"grad_norm": 2.3531439304351807,
"learning_rate": 2.2365508365508367e-05,
"loss": 0.5236,
"step": 10740
},
{
"epoch": 2.7666966928323253,
"grad_norm": 1.0621081590652466,
"learning_rate": 2.2339768339768342e-05,
"loss": 0.5689,
"step": 10750
},
{
"epoch": 2.7692703641744947,
"grad_norm": 3.393990993499756,
"learning_rate": 2.2314028314028314e-05,
"loss": 0.5736,
"step": 10760
},
{
"epoch": 2.7718440355166645,
"grad_norm": 0.9856056571006775,
"learning_rate": 2.228828828828829e-05,
"loss": 0.5719,
"step": 10770
},
{
"epoch": 2.7744177068588343,
"grad_norm": 1.3142099380493164,
"learning_rate": 2.2262548262548264e-05,
"loss": 0.5546,
"step": 10780
},
{
"epoch": 2.7769913782010036,
"grad_norm": 0.969963550567627,
"learning_rate": 2.223680823680824e-05,
"loss": 0.5425,
"step": 10790
},
{
"epoch": 2.7795650495431734,
"grad_norm": 2.802344560623169,
"learning_rate": 2.221106821106821e-05,
"loss": 0.5235,
"step": 10800
},
{
"epoch": 2.782138720885343,
"grad_norm": 1.5331348180770874,
"learning_rate": 2.2185328185328186e-05,
"loss": 0.5436,
"step": 10810
},
{
"epoch": 2.7847123922275125,
"grad_norm": 0.9570822715759277,
"learning_rate": 2.215958815958816e-05,
"loss": 0.5615,
"step": 10820
},
{
"epoch": 2.787286063569682,
"grad_norm": 3.3715949058532715,
"learning_rate": 2.2133848133848136e-05,
"loss": 0.5473,
"step": 10830
},
{
"epoch": 2.7898597349118517,
"grad_norm": 1.2080599069595337,
"learning_rate": 2.2108108108108107e-05,
"loss": 0.6758,
"step": 10840
},
{
"epoch": 2.7924334062540215,
"grad_norm": 0.9372673034667969,
"learning_rate": 2.2082368082368082e-05,
"loss": 0.5913,
"step": 10850
},
{
"epoch": 2.795007077596191,
"grad_norm": 1.019864559173584,
"learning_rate": 2.2056628056628057e-05,
"loss": 0.5194,
"step": 10860
},
{
"epoch": 2.7975807489383606,
"grad_norm": 1.1062774658203125,
"learning_rate": 2.2030888030888032e-05,
"loss": 0.5451,
"step": 10870
},
{
"epoch": 2.8001544202805304,
"grad_norm": 0.8986897468566895,
"learning_rate": 2.2005148005148007e-05,
"loss": 0.5787,
"step": 10880
},
{
"epoch": 2.8027280916226998,
"grad_norm": 0.9067063331604004,
"learning_rate": 2.1979407979407982e-05,
"loss": 0.5901,
"step": 10890
},
{
"epoch": 2.8053017629648695,
"grad_norm": 1.141993522644043,
"learning_rate": 2.1953667953667957e-05,
"loss": 0.5681,
"step": 10900
},
{
"epoch": 2.807875434307039,
"grad_norm": 1.1285045146942139,
"learning_rate": 2.192792792792793e-05,
"loss": 0.5557,
"step": 10910
},
{
"epoch": 2.8104491056492087,
"grad_norm": 0.9529311656951904,
"learning_rate": 2.1902187902187904e-05,
"loss": 0.6106,
"step": 10920
},
{
"epoch": 2.813022776991378,
"grad_norm": 0.9633564949035645,
"learning_rate": 2.187644787644788e-05,
"loss": 0.521,
"step": 10930
},
{
"epoch": 2.815596448333548,
"grad_norm": 1.3736627101898193,
"learning_rate": 2.1850707850707854e-05,
"loss": 0.4961,
"step": 10940
},
{
"epoch": 2.8181701196757176,
"grad_norm": 1.1988080739974976,
"learning_rate": 2.1824967824967825e-05,
"loss": 0.5339,
"step": 10950
},
{
"epoch": 2.820743791017887,
"grad_norm": 3.6586461067199707,
"learning_rate": 2.17992277992278e-05,
"loss": 0.5535,
"step": 10960
},
{
"epoch": 2.8233174623600568,
"grad_norm": 1.6406865119934082,
"learning_rate": 2.1773487773487775e-05,
"loss": 0.5244,
"step": 10970
},
{
"epoch": 2.825891133702226,
"grad_norm": 1.2448114156723022,
"learning_rate": 2.174774774774775e-05,
"loss": 0.6124,
"step": 10980
},
{
"epoch": 2.828464805044396,
"grad_norm": 0.9966627955436707,
"learning_rate": 2.1722007722007722e-05,
"loss": 0.5213,
"step": 10990
},
{
"epoch": 2.8310384763865653,
"grad_norm": 1.5324152708053589,
"learning_rate": 2.1696267696267697e-05,
"loss": 0.5412,
"step": 11000
},
{
"epoch": 2.8310384763865653,
"eval_loss": 0.7792600989341736,
"eval_runtime": 395.6052,
"eval_samples_per_second": 49.105,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00037447141262839833,
"step": 11000
},
{
"epoch": 2.833612147728735,
"grad_norm": 3.205904483795166,
"learning_rate": 2.1670527670527672e-05,
"loss": 0.4943,
"step": 11010
},
{
"epoch": 2.836185819070905,
"grad_norm": 1.0843708515167236,
"learning_rate": 2.1644787644787644e-05,
"loss": 0.5571,
"step": 11020
},
{
"epoch": 2.838759490413074,
"grad_norm": 0.9089128971099854,
"learning_rate": 2.161904761904762e-05,
"loss": 0.5604,
"step": 11030
},
{
"epoch": 2.841333161755244,
"grad_norm": 1.2132848501205444,
"learning_rate": 2.1593307593307594e-05,
"loss": 0.5426,
"step": 11040
},
{
"epoch": 2.8439068330974133,
"grad_norm": 1.2147984504699707,
"learning_rate": 2.156756756756757e-05,
"loss": 0.5833,
"step": 11050
},
{
"epoch": 2.846480504439583,
"grad_norm": 0.9623033404350281,
"learning_rate": 2.154182754182754e-05,
"loss": 0.5253,
"step": 11060
},
{
"epoch": 2.8490541757817525,
"grad_norm": 1.1936800479888916,
"learning_rate": 2.1516087516087515e-05,
"loss": 0.548,
"step": 11070
},
{
"epoch": 2.8516278471239223,
"grad_norm": 3.678395986557007,
"learning_rate": 2.1490347490347494e-05,
"loss": 0.5867,
"step": 11080
},
{
"epoch": 2.854201518466092,
"grad_norm": 3.4173741340637207,
"learning_rate": 2.1464607464607465e-05,
"loss": 0.4922,
"step": 11090
},
{
"epoch": 2.8567751898082614,
"grad_norm": 0.8987242579460144,
"learning_rate": 2.143886743886744e-05,
"loss": 0.5296,
"step": 11100
},
{
"epoch": 2.859348861150431,
"grad_norm": 1.1060086488723755,
"learning_rate": 2.1413127413127415e-05,
"loss": 0.52,
"step": 11110
},
{
"epoch": 2.8619225324926005,
"grad_norm": 0.9053888320922852,
"learning_rate": 2.138738738738739e-05,
"loss": 0.5979,
"step": 11120
},
{
"epoch": 2.8644962038347703,
"grad_norm": 0.9858607053756714,
"learning_rate": 2.1361647361647362e-05,
"loss": 0.5683,
"step": 11130
},
{
"epoch": 2.8670698751769397,
"grad_norm": 2.134103298187256,
"learning_rate": 2.1335907335907337e-05,
"loss": 0.5358,
"step": 11140
},
{
"epoch": 2.8696435465191095,
"grad_norm": 4.8553009033203125,
"learning_rate": 2.1310167310167312e-05,
"loss": 0.5975,
"step": 11150
},
{
"epoch": 2.8722172178612793,
"grad_norm": 1.1935124397277832,
"learning_rate": 2.1284427284427287e-05,
"loss": 0.6338,
"step": 11160
},
{
"epoch": 2.8747908892034486,
"grad_norm": 1.042869210243225,
"learning_rate": 2.125868725868726e-05,
"loss": 0.5529,
"step": 11170
},
{
"epoch": 2.8773645605456184,
"grad_norm": 0.7498083114624023,
"learning_rate": 2.1232947232947233e-05,
"loss": 0.6005,
"step": 11180
},
{
"epoch": 2.879938231887788,
"grad_norm": 1.3521268367767334,
"learning_rate": 2.120720720720721e-05,
"loss": 0.5645,
"step": 11190
},
{
"epoch": 2.8825119032299575,
"grad_norm": 1.2290796041488647,
"learning_rate": 2.1181467181467183e-05,
"loss": 0.561,
"step": 11200
},
{
"epoch": 2.885085574572127,
"grad_norm": 1.114356517791748,
"learning_rate": 2.1155727155727155e-05,
"loss": 0.5426,
"step": 11210
},
{
"epoch": 2.8876592459142967,
"grad_norm": 0.7884505391120911,
"learning_rate": 2.112998712998713e-05,
"loss": 0.5519,
"step": 11220
},
{
"epoch": 2.8902329172564665,
"grad_norm": 1.0232847929000854,
"learning_rate": 2.1104247104247105e-05,
"loss": 0.5406,
"step": 11230
},
{
"epoch": 2.892806588598636,
"grad_norm": 1.2134202718734741,
"learning_rate": 2.107850707850708e-05,
"loss": 0.537,
"step": 11240
},
{
"epoch": 2.8953802599408056,
"grad_norm": 1.1819850206375122,
"learning_rate": 2.105276705276705e-05,
"loss": 0.4655,
"step": 11250
},
{
"epoch": 2.8979539312829754,
"grad_norm": 0.9227721095085144,
"learning_rate": 2.1027027027027027e-05,
"loss": 0.5465,
"step": 11260
},
{
"epoch": 2.9005276026251448,
"grad_norm": 4.2423858642578125,
"learning_rate": 2.1001287001287e-05,
"loss": 0.5245,
"step": 11270
},
{
"epoch": 2.903101273967314,
"grad_norm": 1.0948383808135986,
"learning_rate": 2.0975546975546977e-05,
"loss": 0.5712,
"step": 11280
},
{
"epoch": 2.905674945309484,
"grad_norm": 0.783808171749115,
"learning_rate": 2.094980694980695e-05,
"loss": 0.5705,
"step": 11290
},
{
"epoch": 2.9082486166516537,
"grad_norm": 0.858773410320282,
"learning_rate": 2.0924066924066927e-05,
"loss": 0.5632,
"step": 11300
},
{
"epoch": 2.910822287993823,
"grad_norm": 4.24513578414917,
"learning_rate": 2.08983268983269e-05,
"loss": 0.5613,
"step": 11310
},
{
"epoch": 2.913395959335993,
"grad_norm": 1.336777925491333,
"learning_rate": 2.0872586872586873e-05,
"loss": 0.6024,
"step": 11320
},
{
"epoch": 2.9159696306781626,
"grad_norm": 0.7360126972198486,
"learning_rate": 2.0846846846846848e-05,
"loss": 0.5051,
"step": 11330
},
{
"epoch": 2.918543302020332,
"grad_norm": 0.9983276128768921,
"learning_rate": 2.0821106821106823e-05,
"loss": 0.5366,
"step": 11340
},
{
"epoch": 2.9211169733625018,
"grad_norm": 1.2790330648422241,
"learning_rate": 2.0795366795366798e-05,
"loss": 0.4972,
"step": 11350
},
{
"epoch": 2.923690644704671,
"grad_norm": 1.0925854444503784,
"learning_rate": 2.076962676962677e-05,
"loss": 0.4877,
"step": 11360
},
{
"epoch": 2.926264316046841,
"grad_norm": 1.262011170387268,
"learning_rate": 2.0743886743886745e-05,
"loss": 0.5177,
"step": 11370
},
{
"epoch": 2.9288379873890102,
"grad_norm": 0.7657809853553772,
"learning_rate": 2.071814671814672e-05,
"loss": 0.5845,
"step": 11380
},
{
"epoch": 2.93141165873118,
"grad_norm": 1.21810781955719,
"learning_rate": 2.0692406692406695e-05,
"loss": 0.4873,
"step": 11390
},
{
"epoch": 2.93398533007335,
"grad_norm": 1.1129614114761353,
"learning_rate": 2.0666666666666666e-05,
"loss": 0.5668,
"step": 11400
},
{
"epoch": 2.936559001415519,
"grad_norm": 1.1121175289154053,
"learning_rate": 2.064092664092664e-05,
"loss": 0.4534,
"step": 11410
},
{
"epoch": 2.939132672757689,
"grad_norm": 1.254805564880371,
"learning_rate": 2.0615186615186616e-05,
"loss": 0.5647,
"step": 11420
},
{
"epoch": 2.9417063440998583,
"grad_norm": 1.1753339767456055,
"learning_rate": 2.058944658944659e-05,
"loss": 0.5619,
"step": 11430
},
{
"epoch": 2.944280015442028,
"grad_norm": 1.9952584505081177,
"learning_rate": 2.0563706563706563e-05,
"loss": 0.5437,
"step": 11440
},
{
"epoch": 2.9468536867841975,
"grad_norm": 2.9970035552978516,
"learning_rate": 2.0537966537966538e-05,
"loss": 0.6106,
"step": 11450
},
{
"epoch": 2.9494273581263672,
"grad_norm": 1.2098712921142578,
"learning_rate": 2.0512226512226513e-05,
"loss": 0.5032,
"step": 11460
},
{
"epoch": 2.952001029468537,
"grad_norm": 2.842196464538574,
"learning_rate": 2.0486486486486488e-05,
"loss": 0.4807,
"step": 11470
},
{
"epoch": 2.9545747008107064,
"grad_norm": 0.9120272397994995,
"learning_rate": 2.046074646074646e-05,
"loss": 0.5058,
"step": 11480
},
{
"epoch": 2.957148372152876,
"grad_norm": 2.2939398288726807,
"learning_rate": 2.0435006435006435e-05,
"loss": 0.5666,
"step": 11490
},
{
"epoch": 2.9597220434950455,
"grad_norm": 1.4755898714065552,
"learning_rate": 2.0409266409266413e-05,
"loss": 0.4845,
"step": 11500
},
{
"epoch": 2.9622957148372153,
"grad_norm": 1.120060920715332,
"learning_rate": 2.0383526383526385e-05,
"loss": 0.4979,
"step": 11510
},
{
"epoch": 2.9648693861793847,
"grad_norm": 4.3705058097839355,
"learning_rate": 2.035778635778636e-05,
"loss": 0.6247,
"step": 11520
},
{
"epoch": 2.9674430575215545,
"grad_norm": 1.2147754430770874,
"learning_rate": 2.0332046332046335e-05,
"loss": 0.512,
"step": 11530
},
{
"epoch": 2.9700167288637243,
"grad_norm": 4.265937328338623,
"learning_rate": 2.030630630630631e-05,
"loss": 0.533,
"step": 11540
},
{
"epoch": 2.9725904002058936,
"grad_norm": 1.1540546417236328,
"learning_rate": 2.028056628056628e-05,
"loss": 0.5189,
"step": 11550
},
{
"epoch": 2.9751640715480634,
"grad_norm": 1.3933578729629517,
"learning_rate": 2.0254826254826256e-05,
"loss": 0.6381,
"step": 11560
},
{
"epoch": 2.977737742890233,
"grad_norm": 1.1027209758758545,
"learning_rate": 2.022908622908623e-05,
"loss": 0.5235,
"step": 11570
},
{
"epoch": 2.9803114142324025,
"grad_norm": 0.9615407586097717,
"learning_rate": 2.0203346203346203e-05,
"loss": 0.6184,
"step": 11580
},
{
"epoch": 2.982885085574572,
"grad_norm": 1.2538810968399048,
"learning_rate": 2.0177606177606178e-05,
"loss": 0.576,
"step": 11590
},
{
"epoch": 2.9854587569167417,
"grad_norm": 1.3477590084075928,
"learning_rate": 2.0151866151866153e-05,
"loss": 0.5825,
"step": 11600
},
{
"epoch": 2.9880324282589115,
"grad_norm": 4.23989725112915,
"learning_rate": 2.0126126126126128e-05,
"loss": 0.5581,
"step": 11610
},
{
"epoch": 2.990606099601081,
"grad_norm": 1.2521101236343384,
"learning_rate": 2.01003861003861e-05,
"loss": 0.6155,
"step": 11620
},
{
"epoch": 2.9931797709432506,
"grad_norm": 1.0629501342773438,
"learning_rate": 2.0074646074646074e-05,
"loss": 0.5349,
"step": 11630
},
{
"epoch": 2.9957534422854204,
"grad_norm": 1.3439209461212158,
"learning_rate": 2.004890604890605e-05,
"loss": 0.5551,
"step": 11640
},
{
"epoch": 2.9983271136275897,
"grad_norm": 0.9323076605796814,
"learning_rate": 2.0023166023166024e-05,
"loss": 0.5558,
"step": 11650
},
{
"epoch": 3.0009007849697595,
"grad_norm": 1.5501611232757568,
"learning_rate": 1.9997425997425996e-05,
"loss": 0.5076,
"step": 11660
},
{
"epoch": 3.003474456311929,
"grad_norm": 1.0962281227111816,
"learning_rate": 1.997168597168597e-05,
"loss": 0.4875,
"step": 11670
},
{
"epoch": 3.0060481276540987,
"grad_norm": 1.1923311948776245,
"learning_rate": 1.9945945945945946e-05,
"loss": 0.4643,
"step": 11680
},
{
"epoch": 3.008621798996268,
"grad_norm": 1.4216543436050415,
"learning_rate": 1.992020592020592e-05,
"loss": 0.4181,
"step": 11690
},
{
"epoch": 3.011195470338438,
"grad_norm": 1.0901702642440796,
"learning_rate": 1.9894465894465893e-05,
"loss": 0.4309,
"step": 11700
},
{
"epoch": 3.013769141680607,
"grad_norm": 0.9830016493797302,
"learning_rate": 1.986872586872587e-05,
"loss": 0.4027,
"step": 11710
},
{
"epoch": 3.016342813022777,
"grad_norm": 0.9537155032157898,
"learning_rate": 1.9842985842985846e-05,
"loss": 0.4108,
"step": 11720
},
{
"epoch": 3.0189164843649468,
"grad_norm": 1.4666250944137573,
"learning_rate": 1.9817245817245818e-05,
"loss": 0.4573,
"step": 11730
},
{
"epoch": 3.021490155707116,
"grad_norm": 1.1737228631973267,
"learning_rate": 1.9791505791505793e-05,
"loss": 0.4616,
"step": 11740
},
{
"epoch": 3.024063827049286,
"grad_norm": 0.9376081824302673,
"learning_rate": 1.9765765765765768e-05,
"loss": 0.4141,
"step": 11750
},
{
"epoch": 3.0266374983914552,
"grad_norm": 3.225501775741577,
"learning_rate": 1.9740025740025743e-05,
"loss": 0.4542,
"step": 11760
},
{
"epoch": 3.029211169733625,
"grad_norm": 0.9168078303337097,
"learning_rate": 1.9714285714285714e-05,
"loss": 0.4518,
"step": 11770
},
{
"epoch": 3.031784841075795,
"grad_norm": 1.606330394744873,
"learning_rate": 1.968854568854569e-05,
"loss": 0.3725,
"step": 11780
},
{
"epoch": 3.034358512417964,
"grad_norm": 3.959381341934204,
"learning_rate": 1.9662805662805664e-05,
"loss": 0.4397,
"step": 11790
},
{
"epoch": 3.036932183760134,
"grad_norm": 0.9928016066551208,
"learning_rate": 1.963706563706564e-05,
"loss": 0.4031,
"step": 11800
},
{
"epoch": 3.0395058551023033,
"grad_norm": 1.3497694730758667,
"learning_rate": 1.961132561132561e-05,
"loss": 0.4435,
"step": 11810
},
{
"epoch": 3.042079526444473,
"grad_norm": 1.0364716053009033,
"learning_rate": 1.9585585585585586e-05,
"loss": 0.4138,
"step": 11820
},
{
"epoch": 3.0446531977866425,
"grad_norm": 1.0680062770843506,
"learning_rate": 1.955984555984556e-05,
"loss": 0.448,
"step": 11830
},
{
"epoch": 3.0472268691288122,
"grad_norm": 0.7505292892456055,
"learning_rate": 1.9534105534105536e-05,
"loss": 0.4604,
"step": 11840
},
{
"epoch": 3.049800540470982,
"grad_norm": 1.549533724784851,
"learning_rate": 1.9508365508365508e-05,
"loss": 0.4082,
"step": 11850
},
{
"epoch": 3.0523742118131514,
"grad_norm": 1.0561308860778809,
"learning_rate": 1.9482625482625483e-05,
"loss": 0.4062,
"step": 11860
},
{
"epoch": 3.054947883155321,
"grad_norm": 1.0458426475524902,
"learning_rate": 1.9456885456885458e-05,
"loss": 0.4374,
"step": 11870
},
{
"epoch": 3.0575215544974905,
"grad_norm": 0.9172849059104919,
"learning_rate": 1.9431145431145433e-05,
"loss": 0.4426,
"step": 11880
},
{
"epoch": 3.0600952258396603,
"grad_norm": 1.1287096738815308,
"learning_rate": 1.9405405405405404e-05,
"loss": 0.4038,
"step": 11890
},
{
"epoch": 3.0626688971818297,
"grad_norm": 1.0460259914398193,
"learning_rate": 1.937966537966538e-05,
"loss": 0.3982,
"step": 11900
},
{
"epoch": 3.0652425685239995,
"grad_norm": 1.1148087978363037,
"learning_rate": 1.9353925353925358e-05,
"loss": 0.4112,
"step": 11910
},
{
"epoch": 3.0678162398661692,
"grad_norm": 1.347355604171753,
"learning_rate": 1.932818532818533e-05,
"loss": 0.4286,
"step": 11920
},
{
"epoch": 3.0703899112083386,
"grad_norm": 1.4332010746002197,
"learning_rate": 1.9302445302445304e-05,
"loss": 0.3971,
"step": 11930
},
{
"epoch": 3.0729635825505084,
"grad_norm": 2.455965042114258,
"learning_rate": 1.927670527670528e-05,
"loss": 0.4328,
"step": 11940
},
{
"epoch": 3.0755372538926777,
"grad_norm": 1.319761037826538,
"learning_rate": 1.9250965250965254e-05,
"loss": 0.4166,
"step": 11950
},
{
"epoch": 3.0781109252348475,
"grad_norm": 1.1820671558380127,
"learning_rate": 1.9225225225225226e-05,
"loss": 0.4594,
"step": 11960
},
{
"epoch": 3.0806845965770173,
"grad_norm": 0.905479907989502,
"learning_rate": 1.91994851994852e-05,
"loss": 0.4466,
"step": 11970
},
{
"epoch": 3.0832582679191867,
"grad_norm": 1.3796758651733398,
"learning_rate": 1.9173745173745176e-05,
"loss": 0.398,
"step": 11980
},
{
"epoch": 3.0858319392613565,
"grad_norm": 1.7083042860031128,
"learning_rate": 1.914800514800515e-05,
"loss": 0.4554,
"step": 11990
},
{
"epoch": 3.088405610603526,
"grad_norm": 1.0998810529708862,
"learning_rate": 1.9122265122265122e-05,
"loss": 0.4031,
"step": 12000
},
{
"epoch": 3.088405610603526,
"eval_loss": 0.8223614692687988,
"eval_runtime": 395.6102,
"eval_samples_per_second": 49.104,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.00035795898845269097,
"step": 12000
},
{
"epoch": 3.0909792819456956,
"grad_norm": 0.8669257760047913,
"learning_rate": 1.9096525096525097e-05,
"loss": 0.4385,
"step": 12010
},
{
"epoch": 3.093552953287865,
"grad_norm": 1.6666041612625122,
"learning_rate": 1.9070785070785072e-05,
"loss": 0.4202,
"step": 12020
},
{
"epoch": 3.0961266246300347,
"grad_norm": 1.2697246074676514,
"learning_rate": 1.9045045045045047e-05,
"loss": 0.4472,
"step": 12030
},
{
"epoch": 3.0987002959722045,
"grad_norm": 1.0847759246826172,
"learning_rate": 1.901930501930502e-05,
"loss": 0.4642,
"step": 12040
},
{
"epoch": 3.101273967314374,
"grad_norm": 2.803271532058716,
"learning_rate": 1.8993564993564994e-05,
"loss": 0.4555,
"step": 12050
},
{
"epoch": 3.1038476386565437,
"grad_norm": 1.1774652004241943,
"learning_rate": 1.896782496782497e-05,
"loss": 0.472,
"step": 12060
},
{
"epoch": 3.106421309998713,
"grad_norm": 3.2696588039398193,
"learning_rate": 1.894208494208494e-05,
"loss": 0.4239,
"step": 12070
},
{
"epoch": 3.108994981340883,
"grad_norm": 1.0834957361221313,
"learning_rate": 1.8916344916344916e-05,
"loss": 0.3855,
"step": 12080
},
{
"epoch": 3.111568652683052,
"grad_norm": 1.2990940809249878,
"learning_rate": 1.889060489060489e-05,
"loss": 0.4416,
"step": 12090
},
{
"epoch": 3.114142324025222,
"grad_norm": 5.679697036743164,
"learning_rate": 1.8864864864864866e-05,
"loss": 0.4118,
"step": 12100
},
{
"epoch": 3.1167159953673917,
"grad_norm": 1.1892821788787842,
"learning_rate": 1.8839124839124837e-05,
"loss": 0.3947,
"step": 12110
},
{
"epoch": 3.119289666709561,
"grad_norm": 3.293264389038086,
"learning_rate": 1.8813384813384816e-05,
"loss": 0.4016,
"step": 12120
},
{
"epoch": 3.121863338051731,
"grad_norm": 1.1677567958831787,
"learning_rate": 1.878764478764479e-05,
"loss": 0.4077,
"step": 12130
},
{
"epoch": 3.1244370093939002,
"grad_norm": 1.0109992027282715,
"learning_rate": 1.8761904761904766e-05,
"loss": 0.3985,
"step": 12140
},
{
"epoch": 3.12701068073607,
"grad_norm": 1.0192488431930542,
"learning_rate": 1.8736164736164737e-05,
"loss": 0.4088,
"step": 12150
},
{
"epoch": 3.12958435207824,
"grad_norm": 2.916017532348633,
"learning_rate": 1.8710424710424712e-05,
"loss": 0.4372,
"step": 12160
},
{
"epoch": 3.132158023420409,
"grad_norm": 1.1380913257598877,
"learning_rate": 1.8684684684684687e-05,
"loss": 0.4914,
"step": 12170
},
{
"epoch": 3.134731694762579,
"grad_norm": 1.2204816341400146,
"learning_rate": 1.865894465894466e-05,
"loss": 0.3692,
"step": 12180
},
{
"epoch": 3.1373053661047483,
"grad_norm": 5.1090803146362305,
"learning_rate": 1.8633204633204634e-05,
"loss": 0.4168,
"step": 12190
},
{
"epoch": 3.139879037446918,
"grad_norm": 5.300877094268799,
"learning_rate": 1.860746460746461e-05,
"loss": 0.4533,
"step": 12200
},
{
"epoch": 3.1424527087890874,
"grad_norm": 1.053403377532959,
"learning_rate": 1.8581724581724584e-05,
"loss": 0.4207,
"step": 12210
},
{
"epoch": 3.1450263801312572,
"grad_norm": 0.8829357028007507,
"learning_rate": 1.8555984555984555e-05,
"loss": 0.4247,
"step": 12220
},
{
"epoch": 3.147600051473427,
"grad_norm": 1.015411138534546,
"learning_rate": 1.853024453024453e-05,
"loss": 0.4154,
"step": 12230
},
{
"epoch": 3.1501737228155964,
"grad_norm": 4.939092636108398,
"learning_rate": 1.8504504504504505e-05,
"loss": 0.3905,
"step": 12240
},
{
"epoch": 3.152747394157766,
"grad_norm": 1.0195497274398804,
"learning_rate": 1.847876447876448e-05,
"loss": 0.4135,
"step": 12250
},
{
"epoch": 3.1553210654999355,
"grad_norm": 1.3973890542984009,
"learning_rate": 1.8453024453024452e-05,
"loss": 0.4462,
"step": 12260
},
{
"epoch": 3.1578947368421053,
"grad_norm": 1.9131966829299927,
"learning_rate": 1.8427284427284427e-05,
"loss": 0.4195,
"step": 12270
},
{
"epoch": 3.1604684081842747,
"grad_norm": 1.4273961782455444,
"learning_rate": 1.8401544401544402e-05,
"loss": 0.4078,
"step": 12280
},
{
"epoch": 3.1630420795264445,
"grad_norm": 0.850229024887085,
"learning_rate": 1.8375804375804377e-05,
"loss": 0.4249,
"step": 12290
},
{
"epoch": 3.1656157508686142,
"grad_norm": 1.3435311317443848,
"learning_rate": 1.835006435006435e-05,
"loss": 0.4236,
"step": 12300
},
{
"epoch": 3.1681894222107836,
"grad_norm": 1.1101402044296265,
"learning_rate": 1.8324324324324324e-05,
"loss": 0.4806,
"step": 12310
},
{
"epoch": 3.1707630935529534,
"grad_norm": 1.9284107685089111,
"learning_rate": 1.82985842985843e-05,
"loss": 0.4059,
"step": 12320
},
{
"epoch": 3.1733367648951227,
"grad_norm": 0.9383375644683838,
"learning_rate": 1.8272844272844274e-05,
"loss": 0.4593,
"step": 12330
},
{
"epoch": 3.1759104362372925,
"grad_norm": 1.1090718507766724,
"learning_rate": 1.824710424710425e-05,
"loss": 0.451,
"step": 12340
},
{
"epoch": 3.178484107579462,
"grad_norm": 3.4713733196258545,
"learning_rate": 1.8221364221364224e-05,
"loss": 0.4403,
"step": 12350
},
{
"epoch": 3.1810577789216317,
"grad_norm": 1.001657485961914,
"learning_rate": 1.81956241956242e-05,
"loss": 0.4593,
"step": 12360
},
{
"epoch": 3.1836314502638015,
"grad_norm": 0.9134547710418701,
"learning_rate": 1.816988416988417e-05,
"loss": 0.4513,
"step": 12370
},
{
"epoch": 3.186205121605971,
"grad_norm": 1.105063557624817,
"learning_rate": 1.8144144144144145e-05,
"loss": 0.4476,
"step": 12380
},
{
"epoch": 3.1887787929481406,
"grad_norm": 1.3801063299179077,
"learning_rate": 1.811840411840412e-05,
"loss": 0.4312,
"step": 12390
},
{
"epoch": 3.19135246429031,
"grad_norm": 5.133249282836914,
"learning_rate": 1.8092664092664095e-05,
"loss": 0.4124,
"step": 12400
},
{
"epoch": 3.1939261356324797,
"grad_norm": 1.1436964273452759,
"learning_rate": 1.8066924066924067e-05,
"loss": 0.4242,
"step": 12410
},
{
"epoch": 3.1964998069746495,
"grad_norm": 0.8527979850769043,
"learning_rate": 1.8041184041184042e-05,
"loss": 0.4159,
"step": 12420
},
{
"epoch": 3.199073478316819,
"grad_norm": 1.0988205671310425,
"learning_rate": 1.8015444015444017e-05,
"loss": 0.3943,
"step": 12430
},
{
"epoch": 3.2016471496589887,
"grad_norm": 2.3774611949920654,
"learning_rate": 1.7989703989703992e-05,
"loss": 0.4405,
"step": 12440
},
{
"epoch": 3.204220821001158,
"grad_norm": 1.3425320386886597,
"learning_rate": 1.7963963963963963e-05,
"loss": 0.4397,
"step": 12450
},
{
"epoch": 3.206794492343328,
"grad_norm": 1.1551008224487305,
"learning_rate": 1.793822393822394e-05,
"loss": 0.4177,
"step": 12460
},
{
"epoch": 3.209368163685497,
"grad_norm": 1.0739468336105347,
"learning_rate": 1.7912483912483913e-05,
"loss": 0.4226,
"step": 12470
},
{
"epoch": 3.211941835027667,
"grad_norm": 1.012554407119751,
"learning_rate": 1.788674388674389e-05,
"loss": 0.4394,
"step": 12480
},
{
"epoch": 3.2145155063698367,
"grad_norm": 1.0329439640045166,
"learning_rate": 1.786100386100386e-05,
"loss": 0.4219,
"step": 12490
},
{
"epoch": 3.217089177712006,
"grad_norm": 1.340021014213562,
"learning_rate": 1.7835263835263835e-05,
"loss": 0.4431,
"step": 12500
},
{
"epoch": 3.219662849054176,
"grad_norm": 0.9239644408226013,
"learning_rate": 1.780952380952381e-05,
"loss": 0.4128,
"step": 12510
},
{
"epoch": 3.2222365203963452,
"grad_norm": 1.1054936647415161,
"learning_rate": 1.7783783783783785e-05,
"loss": 0.4215,
"step": 12520
},
{
"epoch": 3.224810191738515,
"grad_norm": 4.010881423950195,
"learning_rate": 1.7758043758043757e-05,
"loss": 0.3813,
"step": 12530
},
{
"epoch": 3.227383863080685,
"grad_norm": 1.6302077770233154,
"learning_rate": 1.7732303732303735e-05,
"loss": 0.427,
"step": 12540
},
{
"epoch": 3.229957534422854,
"grad_norm": 4.041463851928711,
"learning_rate": 1.770656370656371e-05,
"loss": 0.4766,
"step": 12550
},
{
"epoch": 3.232531205765024,
"grad_norm": 1.1331210136413574,
"learning_rate": 1.768082368082368e-05,
"loss": 0.4566,
"step": 12560
},
{
"epoch": 3.2351048771071933,
"grad_norm": 1.1952770948410034,
"learning_rate": 1.7655083655083657e-05,
"loss": 0.4393,
"step": 12570
},
{
"epoch": 3.237678548449363,
"grad_norm": 1.2376477718353271,
"learning_rate": 1.762934362934363e-05,
"loss": 0.4346,
"step": 12580
},
{
"epoch": 3.2402522197915324,
"grad_norm": 0.931395947933197,
"learning_rate": 1.7603603603603607e-05,
"loss": 0.4355,
"step": 12590
},
{
"epoch": 3.2428258911337022,
"grad_norm": 0.7813390493392944,
"learning_rate": 1.7577863577863578e-05,
"loss": 0.459,
"step": 12600
},
{
"epoch": 3.245399562475872,
"grad_norm": 0.9730493426322937,
"learning_rate": 1.7552123552123553e-05,
"loss": 0.4541,
"step": 12610
},
{
"epoch": 3.2479732338180414,
"grad_norm": 1.0856324434280396,
"learning_rate": 1.7526383526383528e-05,
"loss": 0.4046,
"step": 12620
},
{
"epoch": 3.250546905160211,
"grad_norm": 1.2812646627426147,
"learning_rate": 1.7500643500643503e-05,
"loss": 0.4322,
"step": 12630
},
{
"epoch": 3.2531205765023805,
"grad_norm": 1.2020498514175415,
"learning_rate": 1.7474903474903475e-05,
"loss": 0.4366,
"step": 12640
},
{
"epoch": 3.2556942478445503,
"grad_norm": 1.462583065032959,
"learning_rate": 1.744916344916345e-05,
"loss": 0.3911,
"step": 12650
},
{
"epoch": 3.2582679191867197,
"grad_norm": 0.9804076552391052,
"learning_rate": 1.7423423423423425e-05,
"loss": 0.501,
"step": 12660
},
{
"epoch": 3.2608415905288894,
"grad_norm": 1.153205156326294,
"learning_rate": 1.7397683397683396e-05,
"loss": 0.4456,
"step": 12670
},
{
"epoch": 3.2634152618710592,
"grad_norm": 0.8798128366470337,
"learning_rate": 1.737194337194337e-05,
"loss": 0.4622,
"step": 12680
},
{
"epoch": 3.2659889332132286,
"grad_norm": 1.3517348766326904,
"learning_rate": 1.7346203346203346e-05,
"loss": 0.4705,
"step": 12690
},
{
"epoch": 3.2685626045553984,
"grad_norm": 1.0517302751541138,
"learning_rate": 1.732046332046332e-05,
"loss": 0.502,
"step": 12700
},
{
"epoch": 3.2711362758975677,
"grad_norm": 0.8883270621299744,
"learning_rate": 1.7294723294723293e-05,
"loss": 0.4055,
"step": 12710
},
{
"epoch": 3.2737099472397375,
"grad_norm": 0.9611861109733582,
"learning_rate": 1.7268983268983268e-05,
"loss": 0.4462,
"step": 12720
},
{
"epoch": 3.276283618581907,
"grad_norm": 0.9720549583435059,
"learning_rate": 1.7243243243243243e-05,
"loss": 0.4339,
"step": 12730
},
{
"epoch": 3.2788572899240767,
"grad_norm": 1.8205825090408325,
"learning_rate": 1.7217503217503218e-05,
"loss": 0.4815,
"step": 12740
},
{
"epoch": 3.2814309612662464,
"grad_norm": 1.316831111907959,
"learning_rate": 1.7191763191763193e-05,
"loss": 0.4655,
"step": 12750
},
{
"epoch": 3.284004632608416,
"grad_norm": 1.3125663995742798,
"learning_rate": 1.7166023166023168e-05,
"loss": 0.4719,
"step": 12760
},
{
"epoch": 3.2865783039505856,
"grad_norm": 1.2112400531768799,
"learning_rate": 1.7140283140283143e-05,
"loss": 0.4137,
"step": 12770
},
{
"epoch": 3.289151975292755,
"grad_norm": 1.2232820987701416,
"learning_rate": 1.7114543114543115e-05,
"loss": 0.4392,
"step": 12780
},
{
"epoch": 3.2917256466349247,
"grad_norm": 1.1535100936889648,
"learning_rate": 1.708880308880309e-05,
"loss": 0.4628,
"step": 12790
},
{
"epoch": 3.294299317977094,
"grad_norm": 1.443505048751831,
"learning_rate": 1.7063063063063065e-05,
"loss": 0.4385,
"step": 12800
},
{
"epoch": 3.296872989319264,
"grad_norm": 1.0268503427505493,
"learning_rate": 1.703732303732304e-05,
"loss": 0.4135,
"step": 12810
},
{
"epoch": 3.2994466606614337,
"grad_norm": 0.8816016316413879,
"learning_rate": 1.701158301158301e-05,
"loss": 0.4897,
"step": 12820
},
{
"epoch": 3.302020332003603,
"grad_norm": 1.4715735912322998,
"learning_rate": 1.6985842985842986e-05,
"loss": 0.4285,
"step": 12830
},
{
"epoch": 3.304594003345773,
"grad_norm": 1.63077974319458,
"learning_rate": 1.696010296010296e-05,
"loss": 0.4841,
"step": 12840
},
{
"epoch": 3.3071676746879426,
"grad_norm": 1.157626748085022,
"learning_rate": 1.6934362934362936e-05,
"loss": 0.4523,
"step": 12850
},
{
"epoch": 3.309741346030112,
"grad_norm": 2.7212443351745605,
"learning_rate": 1.6908622908622908e-05,
"loss": 0.4134,
"step": 12860
},
{
"epoch": 3.3123150173722817,
"grad_norm": 0.8014114499092102,
"learning_rate": 1.6882882882882883e-05,
"loss": 0.3669,
"step": 12870
},
{
"epoch": 3.314888688714451,
"grad_norm": 1.2325034141540527,
"learning_rate": 1.6857142857142858e-05,
"loss": 0.4272,
"step": 12880
},
{
"epoch": 3.317462360056621,
"grad_norm": 0.9247467517852783,
"learning_rate": 1.6831402831402833e-05,
"loss": 0.4294,
"step": 12890
},
{
"epoch": 3.32003603139879,
"grad_norm": 0.9387899041175842,
"learning_rate": 1.6805662805662805e-05,
"loss": 0.4372,
"step": 12900
},
{
"epoch": 3.32260970274096,
"grad_norm": 3.1787610054016113,
"learning_rate": 1.677992277992278e-05,
"loss": 0.4523,
"step": 12910
},
{
"epoch": 3.32518337408313,
"grad_norm": 1.0653231143951416,
"learning_rate": 1.6754182754182755e-05,
"loss": 0.4646,
"step": 12920
},
{
"epoch": 3.327757045425299,
"grad_norm": 1.1255438327789307,
"learning_rate": 1.672844272844273e-05,
"loss": 0.4399,
"step": 12930
},
{
"epoch": 3.330330716767469,
"grad_norm": 1.021522879600525,
"learning_rate": 1.67027027027027e-05,
"loss": 0.4362,
"step": 12940
},
{
"epoch": 3.3329043881096383,
"grad_norm": 1.3337011337280273,
"learning_rate": 1.667696267696268e-05,
"loss": 0.4598,
"step": 12950
},
{
"epoch": 3.335478059451808,
"grad_norm": 1.0208321809768677,
"learning_rate": 1.6651222651222654e-05,
"loss": 0.4132,
"step": 12960
},
{
"epoch": 3.3380517307939774,
"grad_norm": 1.224876046180725,
"learning_rate": 1.6625482625482626e-05,
"loss": 0.412,
"step": 12970
},
{
"epoch": 3.3406254021361472,
"grad_norm": 1.0516496896743774,
"learning_rate": 1.65997425997426e-05,
"loss": 0.4059,
"step": 12980
},
{
"epoch": 3.343199073478317,
"grad_norm": 1.2751177549362183,
"learning_rate": 1.6574002574002576e-05,
"loss": 0.4684,
"step": 12990
},
{
"epoch": 3.3457727448204864,
"grad_norm": 1.3460466861724854,
"learning_rate": 1.654826254826255e-05,
"loss": 0.4497,
"step": 13000
},
{
"epoch": 3.3457727448204864,
"eval_loss": 0.8037166595458984,
"eval_runtime": 395.705,
"eval_samples_per_second": 49.092,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.00035492222078819304,
"step": 13000
},
{
"epoch": 3.348346416162656,
"grad_norm": 1.3930424451828003,
"learning_rate": 1.6522522522522523e-05,
"loss": 0.3934,
"step": 13010
},
{
"epoch": 3.3509200875048255,
"grad_norm": 1.0718811750411987,
"learning_rate": 1.6496782496782498e-05,
"loss": 0.439,
"step": 13020
},
{
"epoch": 3.3534937588469953,
"grad_norm": 0.9547449946403503,
"learning_rate": 1.6471042471042473e-05,
"loss": 0.4055,
"step": 13030
},
{
"epoch": 3.3560674301891646,
"grad_norm": 1.0712281465530396,
"learning_rate": 1.6445302445302448e-05,
"loss": 0.4388,
"step": 13040
},
{
"epoch": 3.3586411015313344,
"grad_norm": 1.256049394607544,
"learning_rate": 1.641956241956242e-05,
"loss": 0.5076,
"step": 13050
},
{
"epoch": 3.3612147728735042,
"grad_norm": 0.8553959131240845,
"learning_rate": 1.6393822393822394e-05,
"loss": 0.4392,
"step": 13060
},
{
"epoch": 3.3637884442156736,
"grad_norm": 0.9777474403381348,
"learning_rate": 1.636808236808237e-05,
"loss": 0.4033,
"step": 13070
},
{
"epoch": 3.3663621155578434,
"grad_norm": 1.901464819908142,
"learning_rate": 1.6342342342342344e-05,
"loss": 0.446,
"step": 13080
},
{
"epoch": 3.3689357869000127,
"grad_norm": 1.1458752155303955,
"learning_rate": 1.6316602316602316e-05,
"loss": 0.4282,
"step": 13090
},
{
"epoch": 3.3715094582421825,
"grad_norm": 1.075851321220398,
"learning_rate": 1.629086229086229e-05,
"loss": 0.4591,
"step": 13100
},
{
"epoch": 3.374083129584352,
"grad_norm": 1.1775199174880981,
"learning_rate": 1.6265122265122266e-05,
"loss": 0.4184,
"step": 13110
},
{
"epoch": 3.3766568009265217,
"grad_norm": 1.3904964923858643,
"learning_rate": 1.623938223938224e-05,
"loss": 0.4494,
"step": 13120
},
{
"epoch": 3.3792304722686914,
"grad_norm": 1.2566157579421997,
"learning_rate": 1.6213642213642213e-05,
"loss": 0.483,
"step": 13130
},
{
"epoch": 3.381804143610861,
"grad_norm": 0.7696701288223267,
"learning_rate": 1.6187902187902188e-05,
"loss": 0.4492,
"step": 13140
},
{
"epoch": 3.3843778149530306,
"grad_norm": 1.138867974281311,
"learning_rate": 1.6162162162162163e-05,
"loss": 0.448,
"step": 13150
},
{
"epoch": 3.3869514862952,
"grad_norm": 1.0716248750686646,
"learning_rate": 1.6136422136422138e-05,
"loss": 0.403,
"step": 13160
},
{
"epoch": 3.3895251576373697,
"grad_norm": 1.0408307313919067,
"learning_rate": 1.6110682110682113e-05,
"loss": 0.4505,
"step": 13170
},
{
"epoch": 3.392098828979539,
"grad_norm": 1.176862120628357,
"learning_rate": 1.6084942084942088e-05,
"loss": 0.4501,
"step": 13180
},
{
"epoch": 3.394672500321709,
"grad_norm": 1.18982994556427,
"learning_rate": 1.6059202059202063e-05,
"loss": 0.4152,
"step": 13190
},
{
"epoch": 3.3972461716638787,
"grad_norm": 3.0527262687683105,
"learning_rate": 1.6033462033462034e-05,
"loss": 0.4094,
"step": 13200
},
{
"epoch": 3.399819843006048,
"grad_norm": 1.14426589012146,
"learning_rate": 1.600772200772201e-05,
"loss": 0.4099,
"step": 13210
},
{
"epoch": 3.402393514348218,
"grad_norm": 1.081701397895813,
"learning_rate": 1.5981981981981984e-05,
"loss": 0.428,
"step": 13220
},
{
"epoch": 3.404967185690387,
"grad_norm": 1.6688580513000488,
"learning_rate": 1.5956241956241956e-05,
"loss": 0.5,
"step": 13230
},
{
"epoch": 3.407540857032557,
"grad_norm": 1.1758854389190674,
"learning_rate": 1.593050193050193e-05,
"loss": 0.4582,
"step": 13240
},
{
"epoch": 3.4101145283747267,
"grad_norm": 1.9258641004562378,
"learning_rate": 1.5904761904761906e-05,
"loss": 0.4227,
"step": 13250
},
{
"epoch": 3.412688199716896,
"grad_norm": 0.9658583402633667,
"learning_rate": 1.587902187902188e-05,
"loss": 0.4391,
"step": 13260
},
{
"epoch": 3.415261871059066,
"grad_norm": 1.136678695678711,
"learning_rate": 1.5853281853281852e-05,
"loss": 0.4561,
"step": 13270
},
{
"epoch": 3.417835542401235,
"grad_norm": 1.336816668510437,
"learning_rate": 1.5827541827541827e-05,
"loss": 0.4605,
"step": 13280
},
{
"epoch": 3.420409213743405,
"grad_norm": 1.2746632099151611,
"learning_rate": 1.5801801801801802e-05,
"loss": 0.4618,
"step": 13290
},
{
"epoch": 3.422982885085575,
"grad_norm": 1.2792760133743286,
"learning_rate": 1.5776061776061777e-05,
"loss": 0.4101,
"step": 13300
},
{
"epoch": 3.425556556427744,
"grad_norm": 1.0193665027618408,
"learning_rate": 1.575032175032175e-05,
"loss": 0.4173,
"step": 13310
},
{
"epoch": 3.428130227769914,
"grad_norm": 1.210837721824646,
"learning_rate": 1.5724581724581724e-05,
"loss": 0.4466,
"step": 13320
},
{
"epoch": 3.4307038991120833,
"grad_norm": 0.9081295132637024,
"learning_rate": 1.56988416988417e-05,
"loss": 0.4679,
"step": 13330
},
{
"epoch": 3.433277570454253,
"grad_norm": 1.0284429788589478,
"learning_rate": 1.5673101673101674e-05,
"loss": 0.4082,
"step": 13340
},
{
"epoch": 3.4358512417964224,
"grad_norm": 1.0147459506988525,
"learning_rate": 1.5647361647361646e-05,
"loss": 0.4096,
"step": 13350
},
{
"epoch": 3.438424913138592,
"grad_norm": 3.9549002647399902,
"learning_rate": 1.562162162162162e-05,
"loss": 0.4907,
"step": 13360
},
{
"epoch": 3.440998584480762,
"grad_norm": 1.1533993482589722,
"learning_rate": 1.55958815958816e-05,
"loss": 0.4478,
"step": 13370
},
{
"epoch": 3.4435722558229314,
"grad_norm": 1.3518677949905396,
"learning_rate": 1.557014157014157e-05,
"loss": 0.4654,
"step": 13380
},
{
"epoch": 3.446145927165101,
"grad_norm": 1.235627293586731,
"learning_rate": 1.5544401544401546e-05,
"loss": 0.4478,
"step": 13390
},
{
"epoch": 3.4487195985072705,
"grad_norm": 1.4760185480117798,
"learning_rate": 1.551866151866152e-05,
"loss": 0.4625,
"step": 13400
},
{
"epoch": 3.4512932698494403,
"grad_norm": 1.0546268224716187,
"learning_rate": 1.5492921492921496e-05,
"loss": 0.3887,
"step": 13410
},
{
"epoch": 3.4538669411916096,
"grad_norm": 0.971731960773468,
"learning_rate": 1.5467181467181467e-05,
"loss": 0.4381,
"step": 13420
},
{
"epoch": 3.4564406125337794,
"grad_norm": 1.2670224905014038,
"learning_rate": 1.5441441441441442e-05,
"loss": 0.5249,
"step": 13430
},
{
"epoch": 3.4590142838759492,
"grad_norm": 1.4726381301879883,
"learning_rate": 1.5415701415701417e-05,
"loss": 0.4644,
"step": 13440
},
{
"epoch": 3.4615879552181186,
"grad_norm": 0.9591072797775269,
"learning_rate": 1.5389961389961392e-05,
"loss": 0.4573,
"step": 13450
},
{
"epoch": 3.4641616265602884,
"grad_norm": 1.0489442348480225,
"learning_rate": 1.5364221364221364e-05,
"loss": 0.4467,
"step": 13460
},
{
"epoch": 3.4667352979024577,
"grad_norm": 1.2839806079864502,
"learning_rate": 1.533848133848134e-05,
"loss": 0.4844,
"step": 13470
},
{
"epoch": 3.4693089692446275,
"grad_norm": 1.2316621541976929,
"learning_rate": 1.5312741312741314e-05,
"loss": 0.4506,
"step": 13480
},
{
"epoch": 3.471882640586797,
"grad_norm": 1.1504662036895752,
"learning_rate": 1.528700128700129e-05,
"loss": 0.3969,
"step": 13490
},
{
"epoch": 3.4744563119289666,
"grad_norm": 1.4414169788360596,
"learning_rate": 1.526126126126126e-05,
"loss": 0.433,
"step": 13500
},
{
"epoch": 3.4770299832711364,
"grad_norm": 1.1712054014205933,
"learning_rate": 1.5235521235521235e-05,
"loss": 0.4622,
"step": 13510
},
{
"epoch": 3.479603654613306,
"grad_norm": 0.9692360758781433,
"learning_rate": 1.520978120978121e-05,
"loss": 0.4323,
"step": 13520
},
{
"epoch": 3.4821773259554756,
"grad_norm": 3.664949655532837,
"learning_rate": 1.5184041184041184e-05,
"loss": 0.4761,
"step": 13530
},
{
"epoch": 3.484750997297645,
"grad_norm": 1.2114914655685425,
"learning_rate": 1.5158301158301159e-05,
"loss": 0.5089,
"step": 13540
},
{
"epoch": 3.4873246686398147,
"grad_norm": 1.1861014366149902,
"learning_rate": 1.5132561132561132e-05,
"loss": 0.4537,
"step": 13550
},
{
"epoch": 3.489898339981984,
"grad_norm": 1.2321648597717285,
"learning_rate": 1.5106821106821107e-05,
"loss": 0.5039,
"step": 13560
},
{
"epoch": 3.492472011324154,
"grad_norm": 1.0023382902145386,
"learning_rate": 1.508108108108108e-05,
"loss": 0.4347,
"step": 13570
},
{
"epoch": 3.4950456826663236,
"grad_norm": 1.5064911842346191,
"learning_rate": 1.5055341055341057e-05,
"loss": 0.4311,
"step": 13580
},
{
"epoch": 3.497619354008493,
"grad_norm": 2.946744203567505,
"learning_rate": 1.5029601029601032e-05,
"loss": 0.4479,
"step": 13590
},
{
"epoch": 3.500193025350663,
"grad_norm": 3.439570188522339,
"learning_rate": 1.5003861003861005e-05,
"loss": 0.4942,
"step": 13600
},
{
"epoch": 3.5027666966928326,
"grad_norm": 1.228228211402893,
"learning_rate": 1.497812097812098e-05,
"loss": 0.4554,
"step": 13610
},
{
"epoch": 3.505340368035002,
"grad_norm": 1.0095043182373047,
"learning_rate": 1.4952380952380954e-05,
"loss": 0.4988,
"step": 13620
},
{
"epoch": 3.5079140393771713,
"grad_norm": 1.0728389024734497,
"learning_rate": 1.4926640926640929e-05,
"loss": 0.5008,
"step": 13630
},
{
"epoch": 3.510487710719341,
"grad_norm": 1.4836064577102661,
"learning_rate": 1.4900900900900902e-05,
"loss": 0.4352,
"step": 13640
},
{
"epoch": 3.513061382061511,
"grad_norm": 1.051338791847229,
"learning_rate": 1.4875160875160877e-05,
"loss": 0.4109,
"step": 13650
},
{
"epoch": 3.51563505340368,
"grad_norm": 1.345146656036377,
"learning_rate": 1.484942084942085e-05,
"loss": 0.4174,
"step": 13660
},
{
"epoch": 3.51820872474585,
"grad_norm": 1.2961459159851074,
"learning_rate": 1.4823680823680825e-05,
"loss": 0.474,
"step": 13670
},
{
"epoch": 3.52078239608802,
"grad_norm": 1.370326042175293,
"learning_rate": 1.4797940797940799e-05,
"loss": 0.4244,
"step": 13680
},
{
"epoch": 3.523356067430189,
"grad_norm": 3.830646514892578,
"learning_rate": 1.4772200772200774e-05,
"loss": 0.4581,
"step": 13690
},
{
"epoch": 3.525929738772359,
"grad_norm": 1.1295220851898193,
"learning_rate": 1.4746460746460747e-05,
"loss": 0.435,
"step": 13700
},
{
"epoch": 3.5285034101145283,
"grad_norm": 1.1341050863265991,
"learning_rate": 1.472072072072072e-05,
"loss": 0.4484,
"step": 13710
},
{
"epoch": 3.531077081456698,
"grad_norm": 1.2032519578933716,
"learning_rate": 1.4694980694980695e-05,
"loss": 0.4368,
"step": 13720
},
{
"epoch": 3.5336507527988674,
"grad_norm": 2.008488655090332,
"learning_rate": 1.4669240669240668e-05,
"loss": 0.3941,
"step": 13730
},
{
"epoch": 3.536224424141037,
"grad_norm": 1.0747300386428833,
"learning_rate": 1.4643500643500643e-05,
"loss": 0.4884,
"step": 13740
},
{
"epoch": 3.538798095483207,
"grad_norm": 2.756009578704834,
"learning_rate": 1.4617760617760617e-05,
"loss": 0.4298,
"step": 13750
},
{
"epoch": 3.5413717668253764,
"grad_norm": 0.8556985259056091,
"learning_rate": 1.4592020592020592e-05,
"loss": 0.4827,
"step": 13760
},
{
"epoch": 3.543945438167546,
"grad_norm": 1.360617995262146,
"learning_rate": 1.4566280566280565e-05,
"loss": 0.4264,
"step": 13770
},
{
"epoch": 3.5465191095097155,
"grad_norm": 1.2899786233901978,
"learning_rate": 1.4540540540540543e-05,
"loss": 0.4293,
"step": 13780
},
{
"epoch": 3.5490927808518853,
"grad_norm": 1.2246886491775513,
"learning_rate": 1.4514800514800517e-05,
"loss": 0.4131,
"step": 13790
},
{
"epoch": 3.5516664521940546,
"grad_norm": 1.1932190656661987,
"learning_rate": 1.448906048906049e-05,
"loss": 0.476,
"step": 13800
},
{
"epoch": 3.5542401235362244,
"grad_norm": 4.828482151031494,
"learning_rate": 1.4463320463320465e-05,
"loss": 0.465,
"step": 13810
},
{
"epoch": 3.556813794878394,
"grad_norm": 1.4605960845947266,
"learning_rate": 1.4437580437580438e-05,
"loss": 0.4007,
"step": 13820
},
{
"epoch": 3.5593874662205636,
"grad_norm": 0.9224157929420471,
"learning_rate": 1.4411840411840413e-05,
"loss": 0.4044,
"step": 13830
},
{
"epoch": 3.5619611375627334,
"grad_norm": 1.017256498336792,
"learning_rate": 1.4386100386100387e-05,
"loss": 0.4502,
"step": 13840
},
{
"epoch": 3.5645348089049027,
"grad_norm": 1.9395209550857544,
"learning_rate": 1.4360360360360362e-05,
"loss": 0.3843,
"step": 13850
},
{
"epoch": 3.5671084802470725,
"grad_norm": 1.1960591077804565,
"learning_rate": 1.4334620334620335e-05,
"loss": 0.4528,
"step": 13860
},
{
"epoch": 3.569682151589242,
"grad_norm": 0.9504403471946716,
"learning_rate": 1.430888030888031e-05,
"loss": 0.4412,
"step": 13870
},
{
"epoch": 3.5722558229314116,
"grad_norm": 1.2557878494262695,
"learning_rate": 1.4283140283140283e-05,
"loss": 0.3845,
"step": 13880
},
{
"epoch": 3.5748294942735814,
"grad_norm": 4.165874004364014,
"learning_rate": 1.4257400257400258e-05,
"loss": 0.3932,
"step": 13890
},
{
"epoch": 3.577403165615751,
"grad_norm": 3.318126678466797,
"learning_rate": 1.4231660231660232e-05,
"loss": 0.3809,
"step": 13900
},
{
"epoch": 3.5799768369579206,
"grad_norm": 1.0943117141723633,
"learning_rate": 1.4205920205920207e-05,
"loss": 0.48,
"step": 13910
},
{
"epoch": 3.58255050830009,
"grad_norm": 1.2082642316818237,
"learning_rate": 1.418018018018018e-05,
"loss": 0.4894,
"step": 13920
},
{
"epoch": 3.5851241796422597,
"grad_norm": 0.6787092089653015,
"learning_rate": 1.4154440154440155e-05,
"loss": 0.4201,
"step": 13930
},
{
"epoch": 3.587697850984429,
"grad_norm": 1.3796510696411133,
"learning_rate": 1.4128700128700128e-05,
"loss": 0.5031,
"step": 13940
},
{
"epoch": 3.590271522326599,
"grad_norm": 0.9589558839797974,
"learning_rate": 1.4102960102960103e-05,
"loss": 0.4299,
"step": 13950
},
{
"epoch": 3.5928451936687686,
"grad_norm": 1.0614103078842163,
"learning_rate": 1.4077220077220076e-05,
"loss": 0.4265,
"step": 13960
},
{
"epoch": 3.595418865010938,
"grad_norm": 1.4390029907226562,
"learning_rate": 1.4051480051480051e-05,
"loss": 0.4648,
"step": 13970
},
{
"epoch": 3.597992536353108,
"grad_norm": 4.360158920288086,
"learning_rate": 1.4025740025740025e-05,
"loss": 0.4575,
"step": 13980
},
{
"epoch": 3.6005662076952776,
"grad_norm": 2.3131937980651855,
"learning_rate": 1.4000000000000001e-05,
"loss": 0.4087,
"step": 13990
},
{
"epoch": 3.603139879037447,
"grad_norm": 1.0350521802902222,
"learning_rate": 1.3974259974259976e-05,
"loss": 0.4096,
"step": 14000
},
{
"epoch": 3.603139879037447,
"eval_loss": 0.8108994960784912,
"eval_runtime": 395.6175,
"eval_samples_per_second": 49.103,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.0003543528268510997,
"step": 14000
},
{
"epoch": 3.6057135503796163,
"grad_norm": 1.2455912828445435,
"learning_rate": 1.394851994851995e-05,
"loss": 0.4473,
"step": 14010
},
{
"epoch": 3.608287221721786,
"grad_norm": 0.8037226796150208,
"learning_rate": 1.3922779922779925e-05,
"loss": 0.4257,
"step": 14020
},
{
"epoch": 3.610860893063956,
"grad_norm": 2.2132887840270996,
"learning_rate": 1.3897039897039898e-05,
"loss": 0.4057,
"step": 14030
},
{
"epoch": 3.613434564406125,
"grad_norm": 1.2261261940002441,
"learning_rate": 1.3871299871299873e-05,
"loss": 0.4251,
"step": 14040
},
{
"epoch": 3.616008235748295,
"grad_norm": 0.9967421293258667,
"learning_rate": 1.3845559845559846e-05,
"loss": 0.4117,
"step": 14050
},
{
"epoch": 3.618581907090465,
"grad_norm": 1.0189718008041382,
"learning_rate": 1.3819819819819821e-05,
"loss": 0.4292,
"step": 14060
},
{
"epoch": 3.621155578432634,
"grad_norm": 1.333105206489563,
"learning_rate": 1.3794079794079795e-05,
"loss": 0.4989,
"step": 14070
},
{
"epoch": 3.6237292497748035,
"grad_norm": 3.6797611713409424,
"learning_rate": 1.376833976833977e-05,
"loss": 0.4377,
"step": 14080
},
{
"epoch": 3.6263029211169733,
"grad_norm": 2.8054308891296387,
"learning_rate": 1.3742599742599743e-05,
"loss": 0.4932,
"step": 14090
},
{
"epoch": 3.628876592459143,
"grad_norm": 2.897303819656372,
"learning_rate": 1.3716859716859718e-05,
"loss": 0.4816,
"step": 14100
},
{
"epoch": 3.6314502638013124,
"grad_norm": 1.0359746217727661,
"learning_rate": 1.3691119691119691e-05,
"loss": 0.4421,
"step": 14110
},
{
"epoch": 3.634023935143482,
"grad_norm": 1.1515438556671143,
"learning_rate": 1.3665379665379666e-05,
"loss": 0.4766,
"step": 14120
},
{
"epoch": 3.636597606485652,
"grad_norm": 1.0731172561645508,
"learning_rate": 1.363963963963964e-05,
"loss": 0.4578,
"step": 14130
},
{
"epoch": 3.6391712778278213,
"grad_norm": 3.8715970516204834,
"learning_rate": 1.3613899613899615e-05,
"loss": 0.4549,
"step": 14140
},
{
"epoch": 3.641744949169991,
"grad_norm": 1.386406660079956,
"learning_rate": 1.3588159588159588e-05,
"loss": 0.5023,
"step": 14150
},
{
"epoch": 3.6443186205121605,
"grad_norm": 1.2222354412078857,
"learning_rate": 1.3562419562419563e-05,
"loss": 0.4375,
"step": 14160
},
{
"epoch": 3.6468922918543303,
"grad_norm": 1.2102971076965332,
"learning_rate": 1.3536679536679536e-05,
"loss": 0.4404,
"step": 14170
},
{
"epoch": 3.6494659631964996,
"grad_norm": 0.73658686876297,
"learning_rate": 1.3510939510939511e-05,
"loss": 0.4004,
"step": 14180
},
{
"epoch": 3.6520396345386694,
"grad_norm": 1.3987421989440918,
"learning_rate": 1.3485199485199485e-05,
"loss": 0.432,
"step": 14190
},
{
"epoch": 3.654613305880839,
"grad_norm": 1.2989593744277954,
"learning_rate": 1.3459459459459461e-05,
"loss": 0.4684,
"step": 14200
},
{
"epoch": 3.6571869772230086,
"grad_norm": 1.5131486654281616,
"learning_rate": 1.3433719433719436e-05,
"loss": 0.5014,
"step": 14210
},
{
"epoch": 3.6597606485651784,
"grad_norm": 0.9460519552230835,
"learning_rate": 1.340797940797941e-05,
"loss": 0.4686,
"step": 14220
},
{
"epoch": 3.6623343199073477,
"grad_norm": 1.3320332765579224,
"learning_rate": 1.3382239382239385e-05,
"loss": 0.4612,
"step": 14230
},
{
"epoch": 3.6649079912495175,
"grad_norm": 1.110107421875,
"learning_rate": 1.3356499356499358e-05,
"loss": 0.4824,
"step": 14240
},
{
"epoch": 3.667481662591687,
"grad_norm": 1.6514250040054321,
"learning_rate": 1.3330759330759333e-05,
"loss": 0.4678,
"step": 14250
},
{
"epoch": 3.6700553339338566,
"grad_norm": 2.858354330062866,
"learning_rate": 1.3305019305019306e-05,
"loss": 0.4576,
"step": 14260
},
{
"epoch": 3.6726290052760264,
"grad_norm": 1.5715115070343018,
"learning_rate": 1.327927927927928e-05,
"loss": 0.4547,
"step": 14270
},
{
"epoch": 3.6752026766181958,
"grad_norm": 0.938371479511261,
"learning_rate": 1.3253539253539254e-05,
"loss": 0.3987,
"step": 14280
},
{
"epoch": 3.6777763479603656,
"grad_norm": 0.9989467859268188,
"learning_rate": 1.3227799227799228e-05,
"loss": 0.4403,
"step": 14290
},
{
"epoch": 3.680350019302535,
"grad_norm": 1.117611289024353,
"learning_rate": 1.3202059202059203e-05,
"loss": 0.4009,
"step": 14300
},
{
"epoch": 3.6829236906447047,
"grad_norm": 1.238559603691101,
"learning_rate": 1.3176319176319176e-05,
"loss": 0.4198,
"step": 14310
},
{
"epoch": 3.685497361986874,
"grad_norm": 1.2747526168823242,
"learning_rate": 1.3150579150579151e-05,
"loss": 0.439,
"step": 14320
},
{
"epoch": 3.688071033329044,
"grad_norm": 3.8777308464050293,
"learning_rate": 1.3124839124839124e-05,
"loss": 0.4573,
"step": 14330
},
{
"epoch": 3.6906447046712136,
"grad_norm": 1.1357238292694092,
"learning_rate": 1.30990990990991e-05,
"loss": 0.4578,
"step": 14340
},
{
"epoch": 3.693218376013383,
"grad_norm": 1.4202942848205566,
"learning_rate": 1.3073359073359073e-05,
"loss": 0.4409,
"step": 14350
},
{
"epoch": 3.695792047355553,
"grad_norm": 1.4478346109390259,
"learning_rate": 1.3047619047619048e-05,
"loss": 0.4707,
"step": 14360
},
{
"epoch": 3.6983657186977226,
"grad_norm": 1.4722789525985718,
"learning_rate": 1.3021879021879021e-05,
"loss": 0.4445,
"step": 14370
},
{
"epoch": 3.700939390039892,
"grad_norm": 1.0790144205093384,
"learning_rate": 1.2996138996138996e-05,
"loss": 0.454,
"step": 14380
},
{
"epoch": 3.7035130613820613,
"grad_norm": 2.759157419204712,
"learning_rate": 1.297039897039897e-05,
"loss": 0.3881,
"step": 14390
},
{
"epoch": 3.706086732724231,
"grad_norm": 1.4952332973480225,
"learning_rate": 1.2944658944658944e-05,
"loss": 0.4109,
"step": 14400
},
{
"epoch": 3.708660404066401,
"grad_norm": 1.024506688117981,
"learning_rate": 1.2918918918918921e-05,
"loss": 0.4616,
"step": 14410
},
{
"epoch": 3.71123407540857,
"grad_norm": 5.463751316070557,
"learning_rate": 1.2893178893178894e-05,
"loss": 0.4293,
"step": 14420
},
{
"epoch": 3.71380774675074,
"grad_norm": 1.2399414777755737,
"learning_rate": 1.286743886743887e-05,
"loss": 0.4671,
"step": 14430
},
{
"epoch": 3.71638141809291,
"grad_norm": 1.1366684436798096,
"learning_rate": 1.2841698841698843e-05,
"loss": 0.3873,
"step": 14440
},
{
"epoch": 3.718955089435079,
"grad_norm": 1.025657296180725,
"learning_rate": 1.2815958815958818e-05,
"loss": 0.4927,
"step": 14450
},
{
"epoch": 3.7215287607772485,
"grad_norm": 1.4381457567214966,
"learning_rate": 1.2790218790218791e-05,
"loss": 0.399,
"step": 14460
},
{
"epoch": 3.7241024321194183,
"grad_norm": 4.138847351074219,
"learning_rate": 1.2764478764478766e-05,
"loss": 0.493,
"step": 14470
},
{
"epoch": 3.726676103461588,
"grad_norm": 3.5278961658477783,
"learning_rate": 1.273873873873874e-05,
"loss": 0.4522,
"step": 14480
},
{
"epoch": 3.7292497748037574,
"grad_norm": 1.029931902885437,
"learning_rate": 1.2712998712998714e-05,
"loss": 0.3838,
"step": 14490
},
{
"epoch": 3.731823446145927,
"grad_norm": 0.8327284455299377,
"learning_rate": 1.268983268983269e-05,
"loss": 0.4587,
"step": 14500
},
{
"epoch": 3.734397117488097,
"grad_norm": 2.9695467948913574,
"learning_rate": 1.2664092664092665e-05,
"loss": 0.4544,
"step": 14510
},
{
"epoch": 3.7369707888302663,
"grad_norm": 1.0291829109191895,
"learning_rate": 1.2638352638352638e-05,
"loss": 0.4691,
"step": 14520
},
{
"epoch": 3.739544460172436,
"grad_norm": 1.1941965818405151,
"learning_rate": 1.2612612612612611e-05,
"loss": 0.4585,
"step": 14530
},
{
"epoch": 3.7421181315146055,
"grad_norm": 1.7099372148513794,
"learning_rate": 1.2586872586872586e-05,
"loss": 0.4164,
"step": 14540
},
{
"epoch": 3.7446918028567753,
"grad_norm": 2.46553373336792,
"learning_rate": 1.2561132561132563e-05,
"loss": 0.4529,
"step": 14550
},
{
"epoch": 3.7472654741989446,
"grad_norm": 1.0007997751235962,
"learning_rate": 1.2535392535392538e-05,
"loss": 0.403,
"step": 14560
},
{
"epoch": 3.7498391455411144,
"grad_norm": 1.1691094636917114,
"learning_rate": 1.2509652509652511e-05,
"loss": 0.4435,
"step": 14570
},
{
"epoch": 3.752412816883284,
"grad_norm": 1.2735754251480103,
"learning_rate": 1.2483912483912485e-05,
"loss": 0.5126,
"step": 14580
},
{
"epoch": 3.7549864882254536,
"grad_norm": 3.02443265914917,
"learning_rate": 1.2458172458172458e-05,
"loss": 0.4898,
"step": 14590
},
{
"epoch": 3.7575601595676233,
"grad_norm": 1.1990752220153809,
"learning_rate": 1.2432432432432435e-05,
"loss": 0.4059,
"step": 14600
},
{
"epoch": 3.7601338309097927,
"grad_norm": 1.180837631225586,
"learning_rate": 1.2406692406692408e-05,
"loss": 0.4384,
"step": 14610
},
{
"epoch": 3.7627075022519625,
"grad_norm": 0.8962213397026062,
"learning_rate": 1.2380952380952381e-05,
"loss": 0.4297,
"step": 14620
},
{
"epoch": 3.765281173594132,
"grad_norm": 0.9834825396537781,
"learning_rate": 1.2355212355212356e-05,
"loss": 0.4057,
"step": 14630
},
{
"epoch": 3.7678548449363016,
"grad_norm": 1.174039363861084,
"learning_rate": 1.232947232947233e-05,
"loss": 0.4586,
"step": 14640
},
{
"epoch": 3.7704285162784714,
"grad_norm": 0.9779637455940247,
"learning_rate": 1.2303732303732304e-05,
"loss": 0.4011,
"step": 14650
},
{
"epoch": 3.7730021876206408,
"grad_norm": 1.003485918045044,
"learning_rate": 1.2277992277992278e-05,
"loss": 0.4899,
"step": 14660
},
{
"epoch": 3.7755758589628106,
"grad_norm": 1.3274792432785034,
"learning_rate": 1.2252252252252253e-05,
"loss": 0.4584,
"step": 14670
},
{
"epoch": 3.77814953030498,
"grad_norm": 1.5608062744140625,
"learning_rate": 1.2226512226512226e-05,
"loss": 0.5178,
"step": 14680
},
{
"epoch": 3.7807232016471497,
"grad_norm": 3.1873466968536377,
"learning_rate": 1.2200772200772201e-05,
"loss": 0.448,
"step": 14690
},
{
"epoch": 3.783296872989319,
"grad_norm": 0.8975459337234497,
"learning_rate": 1.2175032175032174e-05,
"loss": 0.3988,
"step": 14700
},
{
"epoch": 3.785870544331489,
"grad_norm": 1.3727668523788452,
"learning_rate": 1.2149292149292151e-05,
"loss": 0.4206,
"step": 14710
},
{
"epoch": 3.7884442156736586,
"grad_norm": 1.0184861421585083,
"learning_rate": 1.2123552123552124e-05,
"loss": 0.407,
"step": 14720
},
{
"epoch": 3.791017887015828,
"grad_norm": 0.9267153739929199,
"learning_rate": 1.20978120978121e-05,
"loss": 0.4058,
"step": 14730
},
{
"epoch": 3.7935915583579978,
"grad_norm": 1.0847712755203247,
"learning_rate": 1.2072072072072073e-05,
"loss": 0.428,
"step": 14740
},
{
"epoch": 3.7961652297001676,
"grad_norm": 1.368102788925171,
"learning_rate": 1.2046332046332048e-05,
"loss": 0.4002,
"step": 14750
},
{
"epoch": 3.798738901042337,
"grad_norm": 1.2423478364944458,
"learning_rate": 1.2020592020592021e-05,
"loss": 0.4696,
"step": 14760
},
{
"epoch": 3.8013125723845063,
"grad_norm": 1.3219197988510132,
"learning_rate": 1.1994851994851996e-05,
"loss": 0.4566,
"step": 14770
},
{
"epoch": 3.803886243726676,
"grad_norm": 0.9948756098747253,
"learning_rate": 1.196911196911197e-05,
"loss": 0.4422,
"step": 14780
},
{
"epoch": 3.806459915068846,
"grad_norm": 1.5322728157043457,
"learning_rate": 1.1943371943371944e-05,
"loss": 0.4079,
"step": 14790
},
{
"epoch": 3.809033586411015,
"grad_norm": 1.384191870689392,
"learning_rate": 1.1917631917631918e-05,
"loss": 0.4617,
"step": 14800
},
{
"epoch": 3.811607257753185,
"grad_norm": 1.3496747016906738,
"learning_rate": 1.1891891891891893e-05,
"loss": 0.4324,
"step": 14810
},
{
"epoch": 3.8141809290953548,
"grad_norm": 1.0405890941619873,
"learning_rate": 1.1866151866151868e-05,
"loss": 0.3989,
"step": 14820
},
{
"epoch": 3.816754600437524,
"grad_norm": 0.8725833296775818,
"learning_rate": 1.1840411840411841e-05,
"loss": 0.4727,
"step": 14830
},
{
"epoch": 3.8193282717796935,
"grad_norm": 1.375089406967163,
"learning_rate": 1.1814671814671816e-05,
"loss": 0.5229,
"step": 14840
},
{
"epoch": 3.8219019431218633,
"grad_norm": 1.0093263387680054,
"learning_rate": 1.178893178893179e-05,
"loss": 0.4561,
"step": 14850
},
{
"epoch": 3.824475614464033,
"grad_norm": 1.45624577999115,
"learning_rate": 1.1763191763191764e-05,
"loss": 0.4518,
"step": 14860
},
{
"epoch": 3.8270492858062024,
"grad_norm": 0.9073015451431274,
"learning_rate": 1.1737451737451738e-05,
"loss": 0.4404,
"step": 14870
},
{
"epoch": 3.829622957148372,
"grad_norm": 1.0322130918502808,
"learning_rate": 1.1711711711711713e-05,
"loss": 0.4328,
"step": 14880
},
{
"epoch": 3.832196628490542,
"grad_norm": 1.549871563911438,
"learning_rate": 1.1685971685971686e-05,
"loss": 0.4882,
"step": 14890
},
{
"epoch": 3.8347702998327113,
"grad_norm": 0.8814623355865479,
"learning_rate": 1.166023166023166e-05,
"loss": 0.4859,
"step": 14900
},
{
"epoch": 3.837343971174881,
"grad_norm": 1.2014186382293701,
"learning_rate": 1.1634491634491634e-05,
"loss": 0.4584,
"step": 14910
},
{
"epoch": 3.8399176425170505,
"grad_norm": 1.3947405815124512,
"learning_rate": 1.1608751608751609e-05,
"loss": 0.4389,
"step": 14920
},
{
"epoch": 3.8424913138592203,
"grad_norm": 1.3492847681045532,
"learning_rate": 1.1583011583011584e-05,
"loss": 0.4413,
"step": 14930
},
{
"epoch": 3.8450649852013896,
"grad_norm": 0.9952030777931213,
"learning_rate": 1.1557271557271557e-05,
"loss": 0.4203,
"step": 14940
},
{
"epoch": 3.8476386565435594,
"grad_norm": 1.1566764116287231,
"learning_rate": 1.1531531531531532e-05,
"loss": 0.4489,
"step": 14950
},
{
"epoch": 3.850212327885729,
"grad_norm": 1.0022921562194824,
"learning_rate": 1.1505791505791506e-05,
"loss": 0.4422,
"step": 14960
},
{
"epoch": 3.8527859992278986,
"grad_norm": 1.1566778421401978,
"learning_rate": 1.148005148005148e-05,
"loss": 0.4089,
"step": 14970
},
{
"epoch": 3.8553596705700683,
"grad_norm": 1.3538066148757935,
"learning_rate": 1.1454311454311454e-05,
"loss": 0.4617,
"step": 14980
},
{
"epoch": 3.8579333419122377,
"grad_norm": 1.1044282913208008,
"learning_rate": 1.1428571428571429e-05,
"loss": 0.4734,
"step": 14990
},
{
"epoch": 3.8605070132544075,
"grad_norm": 1.195326805114746,
"learning_rate": 1.1402831402831402e-05,
"loss": 0.4171,
"step": 15000
},
{
"epoch": 3.8605070132544075,
"eval_loss": 0.8098340630531311,
"eval_runtime": 395.6098,
"eval_samples_per_second": 49.104,
"eval_steps_per_second": 2.457,
"eval_token_accuracy": 0.0003602365642010644,
"step": 15000
},
{
"epoch": 3.863080684596577,
"grad_norm": 3.7595245838165283,
"learning_rate": 1.1377091377091377e-05,
"loss": 0.4476,
"step": 15010
},
{
"epoch": 3.8656543559387466,
"grad_norm": 1.0867187976837158,
"learning_rate": 1.1351351351351352e-05,
"loss": 0.4016,
"step": 15020
},
{
"epoch": 3.8682280272809164,
"grad_norm": 1.2275842428207397,
"learning_rate": 1.1325611325611327e-05,
"loss": 0.4635,
"step": 15030
},
{
"epoch": 3.8708016986230858,
"grad_norm": 2.140843629837036,
"learning_rate": 1.12998712998713e-05,
"loss": 0.4431,
"step": 15040
},
{
"epoch": 3.8733753699652556,
"grad_norm": 5.140436172485352,
"learning_rate": 1.1274131274131276e-05,
"loss": 0.5156,
"step": 15050
},
{
"epoch": 3.875949041307425,
"grad_norm": 1.347815990447998,
"learning_rate": 1.1248391248391249e-05,
"loss": 0.4836,
"step": 15060
},
{
"epoch": 3.8785227126495947,
"grad_norm": 0.9270965456962585,
"learning_rate": 1.1222651222651224e-05,
"loss": 0.4724,
"step": 15070
},
{
"epoch": 3.881096383991764,
"grad_norm": 3.1542866230010986,
"learning_rate": 1.1196911196911197e-05,
"loss": 0.4842,
"step": 15080
},
{
"epoch": 3.883670055333934,
"grad_norm": 3.4730048179626465,
"learning_rate": 1.1171171171171172e-05,
"loss": 0.4559,
"step": 15090
},
{
"epoch": 3.8862437266761036,
"grad_norm": 1.7973135709762573,
"learning_rate": 1.1145431145431146e-05,
"loss": 0.4053,
"step": 15100
},
{
"epoch": 3.888817398018273,
"grad_norm": 1.1773796081542969,
"learning_rate": 1.1119691119691119e-05,
"loss": 0.3808,
"step": 15110
},
{
"epoch": 3.8913910693604428,
"grad_norm": 2.933912754058838,
"learning_rate": 1.1093951093951096e-05,
"loss": 0.4514,
"step": 15120
},
{
"epoch": 3.893964740702612,
"grad_norm": 1.0887664556503296,
"learning_rate": 1.1068211068211069e-05,
"loss": 0.4728,
"step": 15130
},
{
"epoch": 3.896538412044782,
"grad_norm": 1.090585708618164,
"learning_rate": 1.1042471042471044e-05,
"loss": 0.45,
"step": 15140
},
{
"epoch": 3.8991120833869513,
"grad_norm": 2.140028238296509,
"learning_rate": 1.1016731016731017e-05,
"loss": 0.4672,
"step": 15150
},
{
"epoch": 3.901685754729121,
"grad_norm": 0.912236213684082,
"learning_rate": 1.0990990990990992e-05,
"loss": 0.4462,
"step": 15160
},
{
"epoch": 3.904259426071291,
"grad_norm": 2.3811190128326416,
"learning_rate": 1.0965250965250965e-05,
"loss": 0.446,
"step": 15170
},
{
"epoch": 3.90683309741346,
"grad_norm": 1.1356325149536133,
"learning_rate": 1.093951093951094e-05,
"loss": 0.4317,
"step": 15180
},
{
"epoch": 3.90940676875563,
"grad_norm": 1.289565920829773,
"learning_rate": 1.0913770913770914e-05,
"loss": 0.4078,
"step": 15190
},
{
"epoch": 3.9119804400977998,
"grad_norm": 0.9899163842201233,
"learning_rate": 1.0888030888030889e-05,
"loss": 0.4366,
"step": 15200
},
{
"epoch": 3.914554111439969,
"grad_norm": 1.4149761199951172,
"learning_rate": 1.0862290862290862e-05,
"loss": 0.5163,
"step": 15210
},
{
"epoch": 3.9171277827821385,
"grad_norm": 1.1276021003723145,
"learning_rate": 1.0836550836550837e-05,
"loss": 0.4242,
"step": 15220
},
{
"epoch": 3.9197014541243083,
"grad_norm": 1.067687749862671,
"learning_rate": 1.0810810810810812e-05,
"loss": 0.4305,
"step": 15230
},
{
"epoch": 3.922275125466478,
"grad_norm": 2.6914680004119873,
"learning_rate": 1.0785070785070785e-05,
"loss": 0.3787,
"step": 15240
},
{
"epoch": 3.9248487968086474,
"grad_norm": 1.443280816078186,
"learning_rate": 1.075933075933076e-05,
"loss": 0.4043,
"step": 15250
},
{
"epoch": 3.927422468150817,
"grad_norm": 3.6616697311401367,
"learning_rate": 1.0733590733590734e-05,
"loss": 0.4174,
"step": 15260
},
{
"epoch": 3.929996139492987,
"grad_norm": 1.0795118808746338,
"learning_rate": 1.0707850707850709e-05,
"loss": 0.443,
"step": 15270
},
{
"epoch": 3.9325698108351563,
"grad_norm": 1.2405767440795898,
"learning_rate": 1.0682110682110682e-05,
"loss": 0.401,
"step": 15280
},
{
"epoch": 3.9351434821773257,
"grad_norm": 1.446061611175537,
"learning_rate": 1.0656370656370657e-05,
"loss": 0.4593,
"step": 15290
},
{
"epoch": 3.9377171535194955,
"grad_norm": 4.733116626739502,
"learning_rate": 1.063063063063063e-05,
"loss": 0.4237,
"step": 15300
},
{
"epoch": 3.9402908248616653,
"grad_norm": 1.0354984998703003,
"learning_rate": 1.0604890604890605e-05,
"loss": 0.4924,
"step": 15310
},
{
"epoch": 3.9428644962038346,
"grad_norm": 4.033681392669678,
"learning_rate": 1.0579150579150579e-05,
"loss": 0.4451,
"step": 15320
},
{
"epoch": 3.9454381675460044,
"grad_norm": 1.3079330921173096,
"learning_rate": 1.0553410553410555e-05,
"loss": 0.4257,
"step": 15330
},
{
"epoch": 3.948011838888174,
"grad_norm": 5.547299385070801,
"learning_rate": 1.0527670527670529e-05,
"loss": 0.4284,
"step": 15340
},
{
"epoch": 3.9505855102303435,
"grad_norm": 1.283423900604248,
"learning_rate": 1.0501930501930504e-05,
"loss": 0.4863,
"step": 15350
},
{
"epoch": 3.9531591815725133,
"grad_norm": 1.2620773315429688,
"learning_rate": 1.0476190476190477e-05,
"loss": 0.4162,
"step": 15360
},
{
"epoch": 3.9557328529146827,
"grad_norm": 1.2353228330612183,
"learning_rate": 1.0450450450450452e-05,
"loss": 0.4916,
"step": 15370
},
{
"epoch": 3.9583065242568525,
"grad_norm": 1.3218683004379272,
"learning_rate": 1.0424710424710425e-05,
"loss": 0.3846,
"step": 15380
},
{
"epoch": 3.960880195599022,
"grad_norm": 0.9560413956642151,
"learning_rate": 1.0398970398970398e-05,
"loss": 0.4208,
"step": 15390
},
{
"epoch": 3.9634538669411916,
"grad_norm": 1.0743191242218018,
"learning_rate": 1.0373230373230373e-05,
"loss": 0.4151,
"step": 15400
},
{
"epoch": 3.9660275382833614,
"grad_norm": 2.8277199268341064,
"learning_rate": 1.0347490347490347e-05,
"loss": 0.4162,
"step": 15410
},
{
"epoch": 3.9686012096255308,
"grad_norm": 1.490350604057312,
"learning_rate": 1.0321750321750322e-05,
"loss": 0.486,
"step": 15420
},
{
"epoch": 3.9711748809677005,
"grad_norm": 1.0201480388641357,
"learning_rate": 1.0296010296010295e-05,
"loss": 0.4567,
"step": 15430
},
{
"epoch": 3.97374855230987,
"grad_norm": 0.8804197311401367,
"learning_rate": 1.0270270270270272e-05,
"loss": 0.4716,
"step": 15440
},
{
"epoch": 3.9763222236520397,
"grad_norm": 3.7693941593170166,
"learning_rate": 1.0244530244530245e-05,
"loss": 0.478,
"step": 15450
},
{
"epoch": 3.978895894994209,
"grad_norm": 5.499643325805664,
"learning_rate": 1.021879021879022e-05,
"loss": 0.4223,
"step": 15460
},
{
"epoch": 3.981469566336379,
"grad_norm": 0.8914560675621033,
"learning_rate": 1.0193050193050193e-05,
"loss": 0.4079,
"step": 15470
},
{
"epoch": 3.9840432376785486,
"grad_norm": 1.4804257154464722,
"learning_rate": 1.0167310167310168e-05,
"loss": 0.4416,
"step": 15480
},
{
"epoch": 3.986616909020718,
"grad_norm": 1.3806978464126587,
"learning_rate": 1.0141570141570142e-05,
"loss": 0.4259,
"step": 15490
},
{
"epoch": 3.9891905803628878,
"grad_norm": 5.6986212730407715,
"learning_rate": 1.0115830115830117e-05,
"loss": 0.458,
"step": 15500
},
{
"epoch": 3.991764251705057,
"grad_norm": 1.4306472539901733,
"learning_rate": 1.009009009009009e-05,
"loss": 0.4718,
"step": 15510
},
{
"epoch": 3.994337923047227,
"grad_norm": 0.9893137216567993,
"learning_rate": 1.0064350064350065e-05,
"loss": 0.3794,
"step": 15520
},
{
"epoch": 3.9969115943893962,
"grad_norm": 1.7962301969528198,
"learning_rate": 1.0038610038610038e-05,
"loss": 0.4687,
"step": 15530
},
{
"epoch": 3.999485265731566,
"grad_norm": 0.967959463596344,
"learning_rate": 1.0012870012870013e-05,
"loss": 0.4552,
"step": 15540
},
{
"epoch": 4.002058937073736,
"grad_norm": 0.9137493371963501,
"learning_rate": 9.987129987129988e-06,
"loss": 0.3108,
"step": 15550
},
{
"epoch": 4.004632608415905,
"grad_norm": 1.2810945510864258,
"learning_rate": 9.961389961389962e-06,
"loss": 0.3835,
"step": 15560
},
{
"epoch": 4.0072062797580745,
"grad_norm": 0.8937966227531433,
"learning_rate": 9.935649935649937e-06,
"loss": 0.3403,
"step": 15570
},
{
"epoch": 4.009779951100245,
"grad_norm": 1.049453616142273,
"learning_rate": 9.90990990990991e-06,
"loss": 0.3809,
"step": 15580
},
{
"epoch": 4.012353622442414,
"grad_norm": 1.1060616970062256,
"learning_rate": 9.884169884169885e-06,
"loss": 0.3363,
"step": 15590
},
{
"epoch": 4.0149272937845835,
"grad_norm": 0.9460583329200745,
"learning_rate": 9.858429858429858e-06,
"loss": 0.3511,
"step": 15600
},
{
"epoch": 4.017500965126754,
"grad_norm": 0.9251907467842102,
"learning_rate": 9.832689832689833e-06,
"loss": 0.3859,
"step": 15610
},
{
"epoch": 4.020074636468923,
"grad_norm": 1.1467247009277344,
"learning_rate": 9.806949806949807e-06,
"loss": 0.3202,
"step": 15620
},
{
"epoch": 4.022648307811092,
"grad_norm": 1.3818519115447998,
"learning_rate": 9.781209781209782e-06,
"loss": 0.3372,
"step": 15630
},
{
"epoch": 4.025221979153262,
"grad_norm": 0.9987813234329224,
"learning_rate": 9.755469755469757e-06,
"loss": 0.3872,
"step": 15640
},
{
"epoch": 4.027795650495432,
"grad_norm": 0.863815188407898,
"learning_rate": 9.729729729729732e-06,
"loss": 0.3444,
"step": 15650
},
{
"epoch": 4.030369321837601,
"grad_norm": 1.5916346311569214,
"learning_rate": 9.703989703989705e-06,
"loss": 0.3448,
"step": 15660
},
{
"epoch": 4.032942993179771,
"grad_norm": 0.9141498804092407,
"learning_rate": 9.678249678249678e-06,
"loss": 0.3513,
"step": 15670
},
{
"epoch": 4.035516664521941,
"grad_norm": 0.990973174571991,
"learning_rate": 9.652509652509653e-06,
"loss": 0.3584,
"step": 15680
},
{
"epoch": 4.03809033586411,
"grad_norm": 1.2190366983413696,
"learning_rate": 9.626769626769626e-06,
"loss": 0.3294,
"step": 15690
},
{
"epoch": 4.04066400720628,
"grad_norm": 1.7479588985443115,
"learning_rate": 9.601029601029601e-06,
"loss": 0.3963,
"step": 15700
},
{
"epoch": 4.043237678548449,
"grad_norm": 1.09687340259552,
"learning_rate": 9.575289575289575e-06,
"loss": 0.3238,
"step": 15710
},
{
"epoch": 4.045811349890619,
"grad_norm": 0.9358935356140137,
"learning_rate": 9.54954954954955e-06,
"loss": 0.345,
"step": 15720
},
{
"epoch": 4.0483850212327885,
"grad_norm": 3.78627872467041,
"learning_rate": 9.523809523809523e-06,
"loss": 0.3594,
"step": 15730
},
{
"epoch": 4.050958692574958,
"grad_norm": 3.2878758907318115,
"learning_rate": 9.498069498069498e-06,
"loss": 0.3389,
"step": 15740
},
{
"epoch": 4.053532363917128,
"grad_norm": 1.448227882385254,
"learning_rate": 9.472329472329473e-06,
"loss": 0.3344,
"step": 15750
},
{
"epoch": 4.0561060352592975,
"grad_norm": 1.4450225830078125,
"learning_rate": 9.446589446589448e-06,
"loss": 0.3224,
"step": 15760
},
{
"epoch": 4.058679706601467,
"grad_norm": 1.051365852355957,
"learning_rate": 9.420849420849421e-06,
"loss": 0.2675,
"step": 15770
},
{
"epoch": 4.061253377943636,
"grad_norm": 3.0001602172851562,
"learning_rate": 9.395109395109396e-06,
"loss": 0.353,
"step": 15780
},
{
"epoch": 4.063827049285806,
"grad_norm": 1.0692496299743652,
"learning_rate": 9.36936936936937e-06,
"loss": 0.3422,
"step": 15790
},
{
"epoch": 4.066400720627976,
"grad_norm": 1.7115063667297363,
"learning_rate": 9.343629343629345e-06,
"loss": 0.4045,
"step": 15800
},
{
"epoch": 4.068974391970145,
"grad_norm": 4.681840896606445,
"learning_rate": 9.317889317889318e-06,
"loss": 0.3603,
"step": 15810
},
{
"epoch": 4.071548063312315,
"grad_norm": 1.7194322347640991,
"learning_rate": 9.292149292149293e-06,
"loss": 0.3498,
"step": 15820
},
{
"epoch": 4.074121734654485,
"grad_norm": 0.9989597797393799,
"learning_rate": 9.266409266409266e-06,
"loss": 0.3276,
"step": 15830
},
{
"epoch": 4.076695405996654,
"grad_norm": 0.9685651659965515,
"learning_rate": 9.240669240669241e-06,
"loss": 0.316,
"step": 15840
},
{
"epoch": 4.079269077338824,
"grad_norm": 1.5660096406936646,
"learning_rate": 9.214929214929216e-06,
"loss": 0.3929,
"step": 15850
},
{
"epoch": 4.081842748680994,
"grad_norm": 1.4593199491500854,
"learning_rate": 9.18918918918919e-06,
"loss": 0.3564,
"step": 15860
},
{
"epoch": 4.084416420023163,
"grad_norm": 1.3041324615478516,
"learning_rate": 9.163449163449165e-06,
"loss": 0.2997,
"step": 15870
},
{
"epoch": 4.086990091365332,
"grad_norm": 1.8447177410125732,
"learning_rate": 9.137709137709138e-06,
"loss": 0.3274,
"step": 15880
},
{
"epoch": 4.0895637627075025,
"grad_norm": 3.268892765045166,
"learning_rate": 9.111969111969113e-06,
"loss": 0.3505,
"step": 15890
},
{
"epoch": 4.092137434049672,
"grad_norm": 1.6105279922485352,
"learning_rate": 9.086229086229086e-06,
"loss": 0.3177,
"step": 15900
},
{
"epoch": 4.094711105391841,
"grad_norm": 1.8889609575271606,
"learning_rate": 9.060489060489061e-06,
"loss": 0.3904,
"step": 15910
},
{
"epoch": 4.0972847767340115,
"grad_norm": 1.7368106842041016,
"learning_rate": 9.034749034749034e-06,
"loss": 0.339,
"step": 15920
},
{
"epoch": 4.099858448076181,
"grad_norm": 1.3504408597946167,
"learning_rate": 9.00900900900901e-06,
"loss": 0.3543,
"step": 15930
},
{
"epoch": 4.10243211941835,
"grad_norm": 1.0322836637496948,
"learning_rate": 8.983268983268983e-06,
"loss": 0.3498,
"step": 15940
},
{
"epoch": 4.1050057907605195,
"grad_norm": 1.1150479316711426,
"learning_rate": 8.957528957528958e-06,
"loss": 0.3504,
"step": 15950
},
{
"epoch": 4.10757946210269,
"grad_norm": 0.9721202254295349,
"learning_rate": 8.931788931788933e-06,
"loss": 0.3605,
"step": 15960
},
{
"epoch": 4.110153133444859,
"grad_norm": 1.3478281497955322,
"learning_rate": 8.906048906048906e-06,
"loss": 0.322,
"step": 15970
},
{
"epoch": 4.1127268047870285,
"grad_norm": 3.2121946811676025,
"learning_rate": 8.880308880308881e-06,
"loss": 0.3216,
"step": 15980
},
{
"epoch": 4.115300476129199,
"grad_norm": 1.280942440032959,
"learning_rate": 8.854568854568854e-06,
"loss": 0.3064,
"step": 15990
},
{
"epoch": 4.117874147471368,
"grad_norm": 1.1329562664031982,
"learning_rate": 8.82882882882883e-06,
"loss": 0.3252,
"step": 16000
},
{
"epoch": 4.117874147471368,
"eval_loss": 0.8655849695205688,
"eval_runtime": 395.6994,
"eval_samples_per_second": 49.093,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.0003534038369559441,
"step": 16000
},
{
"epoch": 4.120447818813537,
"grad_norm": 1.2837986946105957,
"learning_rate": 8.803088803088803e-06,
"loss": 0.2988,
"step": 16010
},
{
"epoch": 4.123021490155707,
"grad_norm": 1.140343189239502,
"learning_rate": 8.777348777348778e-06,
"loss": 0.3398,
"step": 16020
},
{
"epoch": 4.125595161497877,
"grad_norm": 1.1737910509109497,
"learning_rate": 8.751608751608751e-06,
"loss": 0.3676,
"step": 16030
},
{
"epoch": 4.128168832840046,
"grad_norm": 1.1342344284057617,
"learning_rate": 8.725868725868726e-06,
"loss": 0.3281,
"step": 16040
},
{
"epoch": 4.130742504182216,
"grad_norm": 1.363948941230774,
"learning_rate": 8.7001287001287e-06,
"loss": 0.3137,
"step": 16050
},
{
"epoch": 4.133316175524386,
"grad_norm": 0.9117937684059143,
"learning_rate": 8.674388674388676e-06,
"loss": 0.3463,
"step": 16060
},
{
"epoch": 4.135889846866555,
"grad_norm": 4.627786636352539,
"learning_rate": 8.64864864864865e-06,
"loss": 0.3507,
"step": 16070
},
{
"epoch": 4.138463518208725,
"grad_norm": 1.0469121932983398,
"learning_rate": 8.622908622908624e-06,
"loss": 0.3416,
"step": 16080
},
{
"epoch": 4.141037189550894,
"grad_norm": 4.487536907196045,
"learning_rate": 8.597168597168598e-06,
"loss": 0.3084,
"step": 16090
},
{
"epoch": 4.143610860893064,
"grad_norm": 3.589993953704834,
"learning_rate": 8.571428571428573e-06,
"loss": 0.3498,
"step": 16100
},
{
"epoch": 4.1461845322352335,
"grad_norm": 0.8773334622383118,
"learning_rate": 8.545688545688546e-06,
"loss": 0.3859,
"step": 16110
},
{
"epoch": 4.148758203577403,
"grad_norm": 1.286348819732666,
"learning_rate": 8.519948519948521e-06,
"loss": 0.2771,
"step": 16120
},
{
"epoch": 4.151331874919573,
"grad_norm": 1.3166707754135132,
"learning_rate": 8.494208494208494e-06,
"loss": 0.4015,
"step": 16130
},
{
"epoch": 4.1539055462617425,
"grad_norm": 4.864274978637695,
"learning_rate": 8.46846846846847e-06,
"loss": 0.3171,
"step": 16140
},
{
"epoch": 4.156479217603912,
"grad_norm": 1.0414588451385498,
"learning_rate": 8.442728442728443e-06,
"loss": 0.3396,
"step": 16150
},
{
"epoch": 4.159052888946081,
"grad_norm": 5.264183521270752,
"learning_rate": 8.416988416988418e-06,
"loss": 0.3569,
"step": 16160
},
{
"epoch": 4.161626560288251,
"grad_norm": 1.0647825002670288,
"learning_rate": 8.391248391248393e-06,
"loss": 0.3317,
"step": 16170
},
{
"epoch": 4.164200231630421,
"grad_norm": 0.8864617347717285,
"learning_rate": 8.365508365508366e-06,
"loss": 0.3493,
"step": 16180
},
{
"epoch": 4.16677390297259,
"grad_norm": 1.1304059028625488,
"learning_rate": 8.33976833976834e-06,
"loss": 0.3038,
"step": 16190
},
{
"epoch": 4.16934757431476,
"grad_norm": 1.0398166179656982,
"learning_rate": 8.314028314028314e-06,
"loss": 0.3168,
"step": 16200
},
{
"epoch": 4.17192124565693,
"grad_norm": 0.8544594645500183,
"learning_rate": 8.288288288288289e-06,
"loss": 0.3365,
"step": 16210
},
{
"epoch": 4.174494916999099,
"grad_norm": 0.9230920672416687,
"learning_rate": 8.262548262548262e-06,
"loss": 0.3184,
"step": 16220
},
{
"epoch": 4.177068588341269,
"grad_norm": 6.984750270843506,
"learning_rate": 8.236808236808237e-06,
"loss": 0.3553,
"step": 16230
},
{
"epoch": 4.179642259683439,
"grad_norm": 4.589465141296387,
"learning_rate": 8.21106821106821e-06,
"loss": 0.3263,
"step": 16240
},
{
"epoch": 4.182215931025608,
"grad_norm": 1.004537582397461,
"learning_rate": 8.185328185328186e-06,
"loss": 0.341,
"step": 16250
},
{
"epoch": 4.184789602367777,
"grad_norm": 2.243464231491089,
"learning_rate": 8.159588159588159e-06,
"loss": 0.3709,
"step": 16260
},
{
"epoch": 4.1873632737099475,
"grad_norm": 1.2939341068267822,
"learning_rate": 8.133848133848134e-06,
"loss": 0.2929,
"step": 16270
},
{
"epoch": 4.189936945052117,
"grad_norm": 1.1193703413009644,
"learning_rate": 8.108108108108109e-06,
"loss": 0.3015,
"step": 16280
},
{
"epoch": 4.192510616394286,
"grad_norm": 0.9649505019187927,
"learning_rate": 8.082368082368082e-06,
"loss": 0.3044,
"step": 16290
},
{
"epoch": 4.1950842877364565,
"grad_norm": 1.2722036838531494,
"learning_rate": 8.056628056628057e-06,
"loss": 0.3654,
"step": 16300
},
{
"epoch": 4.197657959078626,
"grad_norm": 1.9029432535171509,
"learning_rate": 8.03088803088803e-06,
"loss": 0.33,
"step": 16310
},
{
"epoch": 4.200231630420795,
"grad_norm": 0.8544790744781494,
"learning_rate": 8.005148005148006e-06,
"loss": 0.3368,
"step": 16320
},
{
"epoch": 4.2028053017629645,
"grad_norm": 0.991193950176239,
"learning_rate": 7.979407979407979e-06,
"loss": 0.3242,
"step": 16330
},
{
"epoch": 4.205378973105135,
"grad_norm": 1.923233151435852,
"learning_rate": 7.953667953667954e-06,
"loss": 0.2868,
"step": 16340
},
{
"epoch": 4.207952644447304,
"grad_norm": 1.3587863445281982,
"learning_rate": 7.927927927927927e-06,
"loss": 0.3379,
"step": 16350
},
{
"epoch": 4.2105263157894735,
"grad_norm": 0.9470755457878113,
"learning_rate": 7.902187902187902e-06,
"loss": 0.311,
"step": 16360
},
{
"epoch": 4.213099987131644,
"grad_norm": 1.3828065395355225,
"learning_rate": 7.876447876447877e-06,
"loss": 0.3017,
"step": 16370
},
{
"epoch": 4.215673658473813,
"grad_norm": 1.4327831268310547,
"learning_rate": 7.850707850707852e-06,
"loss": 0.3226,
"step": 16380
},
{
"epoch": 4.218247329815982,
"grad_norm": 1.1601290702819824,
"learning_rate": 7.824967824967826e-06,
"loss": 0.3352,
"step": 16390
},
{
"epoch": 4.220821001158152,
"grad_norm": 1.0657103061676025,
"learning_rate": 7.7992277992278e-06,
"loss": 0.332,
"step": 16400
},
{
"epoch": 4.223394672500322,
"grad_norm": 3.6614043712615967,
"learning_rate": 7.773487773487774e-06,
"loss": 0.342,
"step": 16410
},
{
"epoch": 4.225968343842491,
"grad_norm": 1.095182180404663,
"learning_rate": 7.747747747747749e-06,
"loss": 0.3488,
"step": 16420
},
{
"epoch": 4.228542015184661,
"grad_norm": 1.0206010341644287,
"learning_rate": 7.722007722007722e-06,
"loss": 0.3571,
"step": 16430
},
{
"epoch": 4.231115686526831,
"grad_norm": 1.0806519985198975,
"learning_rate": 7.696267696267697e-06,
"loss": 0.3845,
"step": 16440
},
{
"epoch": 4.233689357869,
"grad_norm": 1.2475837469100952,
"learning_rate": 7.67052767052767e-06,
"loss": 0.3936,
"step": 16450
},
{
"epoch": 4.23626302921117,
"grad_norm": 2.754807233810425,
"learning_rate": 7.644787644787644e-06,
"loss": 0.3074,
"step": 16460
},
{
"epoch": 4.238836700553339,
"grad_norm": 2.6300132274627686,
"learning_rate": 7.6190476190476205e-06,
"loss": 0.3422,
"step": 16470
},
{
"epoch": 4.241410371895509,
"grad_norm": 0.9944770932197571,
"learning_rate": 7.593307593307595e-06,
"loss": 0.3585,
"step": 16480
},
{
"epoch": 4.2439840432376785,
"grad_norm": 0.8388077616691589,
"learning_rate": 7.567567567567568e-06,
"loss": 0.3416,
"step": 16490
},
{
"epoch": 4.246557714579848,
"grad_norm": 1.02458918094635,
"learning_rate": 7.544401544401544e-06,
"loss": 0.3858,
"step": 16500
},
{
"epoch": 4.249131385922018,
"grad_norm": 1.3384264707565308,
"learning_rate": 7.518661518661518e-06,
"loss": 0.3755,
"step": 16510
},
{
"epoch": 4.2517050572641875,
"grad_norm": 0.9491118788719177,
"learning_rate": 7.492921492921494e-06,
"loss": 0.3789,
"step": 16520
},
{
"epoch": 4.254278728606357,
"grad_norm": 1.3997193574905396,
"learning_rate": 7.467181467181468e-06,
"loss": 0.364,
"step": 16530
},
{
"epoch": 4.256852399948526,
"grad_norm": 1.488587498664856,
"learning_rate": 7.441441441441442e-06,
"loss": 0.3153,
"step": 16540
},
{
"epoch": 4.259426071290696,
"grad_norm": 1.079736590385437,
"learning_rate": 7.415701415701416e-06,
"loss": 0.3447,
"step": 16550
},
{
"epoch": 4.261999742632866,
"grad_norm": 1.142016887664795,
"learning_rate": 7.38996138996139e-06,
"loss": 0.325,
"step": 16560
},
{
"epoch": 4.264573413975035,
"grad_norm": 0.9195720553398132,
"learning_rate": 7.364221364221364e-06,
"loss": 0.3222,
"step": 16570
},
{
"epoch": 4.267147085317205,
"grad_norm": 3.8581366539001465,
"learning_rate": 7.338481338481338e-06,
"loss": 0.3699,
"step": 16580
},
{
"epoch": 4.269720756659375,
"grad_norm": 0.8989285230636597,
"learning_rate": 7.3127413127413125e-06,
"loss": 0.3438,
"step": 16590
},
{
"epoch": 4.272294428001544,
"grad_norm": 1.3981261253356934,
"learning_rate": 7.287001287001287e-06,
"loss": 0.3827,
"step": 16600
},
{
"epoch": 4.274868099343713,
"grad_norm": 1.5550109148025513,
"learning_rate": 7.261261261261261e-06,
"loss": 0.3755,
"step": 16610
},
{
"epoch": 4.277441770685884,
"grad_norm": 1.4705978631973267,
"learning_rate": 7.235521235521237e-06,
"loss": 0.3258,
"step": 16620
},
{
"epoch": 4.280015442028053,
"grad_norm": 1.0708073377609253,
"learning_rate": 7.209781209781211e-06,
"loss": 0.3469,
"step": 16630
},
{
"epoch": 4.282589113370222,
"grad_norm": 4.414210796356201,
"learning_rate": 7.184041184041185e-06,
"loss": 0.3964,
"step": 16640
},
{
"epoch": 4.2851627847123925,
"grad_norm": 1.2815184593200684,
"learning_rate": 7.158301158301159e-06,
"loss": 0.3284,
"step": 16650
},
{
"epoch": 4.287736456054562,
"grad_norm": 1.2706879377365112,
"learning_rate": 7.132561132561133e-06,
"loss": 0.3881,
"step": 16660
},
{
"epoch": 4.290310127396731,
"grad_norm": 0.791149914264679,
"learning_rate": 7.106821106821107e-06,
"loss": 0.33,
"step": 16670
},
{
"epoch": 4.292883798738901,
"grad_norm": 1.350524663925171,
"learning_rate": 7.0810810810810815e-06,
"loss": 0.4255,
"step": 16680
},
{
"epoch": 4.295457470081071,
"grad_norm": 1.2001677751541138,
"learning_rate": 7.055341055341056e-06,
"loss": 0.3677,
"step": 16690
},
{
"epoch": 4.29803114142324,
"grad_norm": 3.5927603244781494,
"learning_rate": 7.02960102960103e-06,
"loss": 0.377,
"step": 16700
},
{
"epoch": 4.3006048127654095,
"grad_norm": 1.3985093832015991,
"learning_rate": 7.003861003861004e-06,
"loss": 0.3129,
"step": 16710
},
{
"epoch": 4.30317848410758,
"grad_norm": 2.9167826175689697,
"learning_rate": 6.978120978120979e-06,
"loss": 0.3275,
"step": 16720
},
{
"epoch": 4.305752155449749,
"grad_norm": 5.278055667877197,
"learning_rate": 6.952380952380953e-06,
"loss": 0.3816,
"step": 16730
},
{
"epoch": 4.3083258267919184,
"grad_norm": 1.1263563632965088,
"learning_rate": 6.926640926640927e-06,
"loss": 0.3447,
"step": 16740
},
{
"epoch": 4.310899498134089,
"grad_norm": 1.0541788339614868,
"learning_rate": 6.9009009009009014e-06,
"loss": 0.3533,
"step": 16750
},
{
"epoch": 4.313473169476258,
"grad_norm": 1.7180668115615845,
"learning_rate": 6.875160875160876e-06,
"loss": 0.3639,
"step": 16760
},
{
"epoch": 4.316046840818427,
"grad_norm": 1.4886486530303955,
"learning_rate": 6.84942084942085e-06,
"loss": 0.3239,
"step": 16770
},
{
"epoch": 4.318620512160597,
"grad_norm": 1.4927681684494019,
"learning_rate": 6.823680823680824e-06,
"loss": 0.327,
"step": 16780
},
{
"epoch": 4.321194183502767,
"grad_norm": 1.2550019025802612,
"learning_rate": 6.797940797940798e-06,
"loss": 0.3321,
"step": 16790
},
{
"epoch": 4.323767854844936,
"grad_norm": 1.218307614326477,
"learning_rate": 6.772200772200772e-06,
"loss": 0.3482,
"step": 16800
},
{
"epoch": 4.326341526187106,
"grad_norm": 1.1296604871749878,
"learning_rate": 6.746460746460746e-06,
"loss": 0.3373,
"step": 16810
},
{
"epoch": 4.328915197529276,
"grad_norm": 3.720290184020996,
"learning_rate": 6.7207207207207205e-06,
"loss": 0.3701,
"step": 16820
},
{
"epoch": 4.331488868871445,
"grad_norm": 1.146220326423645,
"learning_rate": 6.6949806949806955e-06,
"loss": 0.3682,
"step": 16830
},
{
"epoch": 4.334062540213615,
"grad_norm": 1.438725233078003,
"learning_rate": 6.66924066924067e-06,
"loss": 0.337,
"step": 16840
},
{
"epoch": 4.336636211555784,
"grad_norm": 1.264733910560608,
"learning_rate": 6.643500643500644e-06,
"loss": 0.3546,
"step": 16850
},
{
"epoch": 4.339209882897954,
"grad_norm": 1.1695109605789185,
"learning_rate": 6.617760617760618e-06,
"loss": 0.37,
"step": 16860
},
{
"epoch": 4.3417835542401235,
"grad_norm": 1.0620770454406738,
"learning_rate": 6.592020592020592e-06,
"loss": 0.3563,
"step": 16870
},
{
"epoch": 4.344357225582293,
"grad_norm": 4.714122772216797,
"learning_rate": 6.566280566280566e-06,
"loss": 0.3119,
"step": 16880
},
{
"epoch": 4.346930896924463,
"grad_norm": 1.527566909790039,
"learning_rate": 6.54054054054054e-06,
"loss": 0.3352,
"step": 16890
},
{
"epoch": 4.3495045682666325,
"grad_norm": 1.4223473072052002,
"learning_rate": 6.5148005148005146e-06,
"loss": 0.3913,
"step": 16900
},
{
"epoch": 4.352078239608802,
"grad_norm": 1.3415018320083618,
"learning_rate": 6.489060489060489e-06,
"loss": 0.3759,
"step": 16910
},
{
"epoch": 4.354651910950971,
"grad_norm": 1.1462664604187012,
"learning_rate": 6.463320463320463e-06,
"loss": 0.3822,
"step": 16920
},
{
"epoch": 4.357225582293141,
"grad_norm": 1.2110233306884766,
"learning_rate": 6.437580437580439e-06,
"loss": 0.3454,
"step": 16930
},
{
"epoch": 4.359799253635311,
"grad_norm": 4.063595294952393,
"learning_rate": 6.411840411840413e-06,
"loss": 0.3543,
"step": 16940
},
{
"epoch": 4.36237292497748,
"grad_norm": 0.9260187149047852,
"learning_rate": 6.386100386100387e-06,
"loss": 0.3595,
"step": 16950
},
{
"epoch": 4.36494659631965,
"grad_norm": 1.3427493572235107,
"learning_rate": 6.360360360360361e-06,
"loss": 0.366,
"step": 16960
},
{
"epoch": 4.36752026766182,
"grad_norm": 1.0595113039016724,
"learning_rate": 6.334620334620335e-06,
"loss": 0.2813,
"step": 16970
},
{
"epoch": 4.370093939003989,
"grad_norm": 1.5860631465911865,
"learning_rate": 6.3088803088803095e-06,
"loss": 0.3394,
"step": 16980
},
{
"epoch": 4.372667610346159,
"grad_norm": 1.2586485147476196,
"learning_rate": 6.283140283140284e-06,
"loss": 0.3595,
"step": 16990
},
{
"epoch": 4.375241281688329,
"grad_norm": 1.0204342603683472,
"learning_rate": 6.257400257400258e-06,
"loss": 0.3536,
"step": 17000
},
{
"epoch": 4.375241281688329,
"eval_loss": 0.8610925674438477,
"eval_runtime": 395.7656,
"eval_samples_per_second": 49.085,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.00034752009960597937,
"step": 17000
},
{
"epoch": 4.377814953030498,
"grad_norm": 0.714625895023346,
"learning_rate": 6.231660231660232e-06,
"loss": 0.3166,
"step": 17010
},
{
"epoch": 4.380388624372667,
"grad_norm": 1.5543569326400757,
"learning_rate": 6.205920205920206e-06,
"loss": 0.3606,
"step": 17020
},
{
"epoch": 4.3829622957148375,
"grad_norm": 1.7975174188613892,
"learning_rate": 6.18018018018018e-06,
"loss": 0.3124,
"step": 17030
},
{
"epoch": 4.385535967057007,
"grad_norm": 4.076536178588867,
"learning_rate": 6.154440154440154e-06,
"loss": 0.3513,
"step": 17040
},
{
"epoch": 4.388109638399176,
"grad_norm": 1.210050344467163,
"learning_rate": 6.1287001287001285e-06,
"loss": 0.3522,
"step": 17050
},
{
"epoch": 4.3906833097413465,
"grad_norm": 1.1933529376983643,
"learning_rate": 6.1029601029601035e-06,
"loss": 0.3474,
"step": 17060
},
{
"epoch": 4.393256981083516,
"grad_norm": 1.305173635482788,
"learning_rate": 6.077220077220078e-06,
"loss": 0.3271,
"step": 17070
},
{
"epoch": 4.395830652425685,
"grad_norm": 1.0537171363830566,
"learning_rate": 6.051480051480052e-06,
"loss": 0.3602,
"step": 17080
},
{
"epoch": 4.3984043237678545,
"grad_norm": 1.2690879106521606,
"learning_rate": 6.025740025740026e-06,
"loss": 0.356,
"step": 17090
},
{
"epoch": 4.400977995110025,
"grad_norm": 1.4055087566375732,
"learning_rate": 6e-06,
"loss": 0.3896,
"step": 17100
},
{
"epoch": 4.403551666452194,
"grad_norm": 2.5787110328674316,
"learning_rate": 5.974259974259975e-06,
"loss": 0.3773,
"step": 17110
},
{
"epoch": 4.406125337794363,
"grad_norm": 1.122134804725647,
"learning_rate": 5.948519948519949e-06,
"loss": 0.3427,
"step": 17120
},
{
"epoch": 4.408699009136534,
"grad_norm": 0.8836435675621033,
"learning_rate": 5.9227799227799235e-06,
"loss": 0.3142,
"step": 17130
},
{
"epoch": 4.411272680478703,
"grad_norm": 1.6835286617279053,
"learning_rate": 5.897039897039898e-06,
"loss": 0.3993,
"step": 17140
},
{
"epoch": 4.413846351820872,
"grad_norm": 3.622476577758789,
"learning_rate": 5.871299871299872e-06,
"loss": 0.331,
"step": 17150
},
{
"epoch": 4.416420023163042,
"grad_norm": 1.1817376613616943,
"learning_rate": 5.845559845559846e-06,
"loss": 0.356,
"step": 17160
},
{
"epoch": 4.418993694505212,
"grad_norm": 1.0736690759658813,
"learning_rate": 5.81981981981982e-06,
"loss": 0.4044,
"step": 17170
},
{
"epoch": 4.421567365847381,
"grad_norm": 1.3568885326385498,
"learning_rate": 5.794079794079794e-06,
"loss": 0.3515,
"step": 17180
},
{
"epoch": 4.424141037189551,
"grad_norm": 1.0532563924789429,
"learning_rate": 5.768339768339768e-06,
"loss": 0.3542,
"step": 17190
},
{
"epoch": 4.426714708531721,
"grad_norm": 1.5962337255477905,
"learning_rate": 5.7425997425997425e-06,
"loss": 0.3249,
"step": 17200
},
{
"epoch": 4.42928837987389,
"grad_norm": 1.8830443620681763,
"learning_rate": 5.716859716859717e-06,
"loss": 0.3256,
"step": 17210
},
{
"epoch": 4.43186205121606,
"grad_norm": 1.0833598375320435,
"learning_rate": 5.691119691119692e-06,
"loss": 0.3783,
"step": 17220
},
{
"epoch": 4.434435722558229,
"grad_norm": 1.3849422931671143,
"learning_rate": 5.665379665379666e-06,
"loss": 0.3494,
"step": 17230
},
{
"epoch": 4.437009393900399,
"grad_norm": 2.959022045135498,
"learning_rate": 5.63963963963964e-06,
"loss": 0.3622,
"step": 17240
},
{
"epoch": 4.4395830652425685,
"grad_norm": 0.9271379113197327,
"learning_rate": 5.613899613899614e-06,
"loss": 0.3807,
"step": 17250
},
{
"epoch": 4.442156736584738,
"grad_norm": 1.0124372243881226,
"learning_rate": 5.588159588159588e-06,
"loss": 0.3751,
"step": 17260
},
{
"epoch": 4.444730407926908,
"grad_norm": 3.5358119010925293,
"learning_rate": 5.562419562419563e-06,
"loss": 0.2874,
"step": 17270
},
{
"epoch": 4.4473040792690774,
"grad_norm": 0.9238685369491577,
"learning_rate": 5.5366795366795374e-06,
"loss": 0.3796,
"step": 17280
},
{
"epoch": 4.449877750611247,
"grad_norm": 1.2194762229919434,
"learning_rate": 5.510939510939512e-06,
"loss": 0.3643,
"step": 17290
},
{
"epoch": 4.452451421953416,
"grad_norm": 1.2170203924179077,
"learning_rate": 5.485199485199486e-06,
"loss": 0.3835,
"step": 17300
},
{
"epoch": 4.455025093295586,
"grad_norm": 1.5656147003173828,
"learning_rate": 5.45945945945946e-06,
"loss": 0.3385,
"step": 17310
},
{
"epoch": 4.457598764637756,
"grad_norm": 1.351452112197876,
"learning_rate": 5.433719433719434e-06,
"loss": 0.3646,
"step": 17320
},
{
"epoch": 4.460172435979925,
"grad_norm": 1.1364386081695557,
"learning_rate": 5.407979407979408e-06,
"loss": 0.3132,
"step": 17330
},
{
"epoch": 4.462746107322095,
"grad_norm": 1.2257845401763916,
"learning_rate": 5.382239382239382e-06,
"loss": 0.3266,
"step": 17340
},
{
"epoch": 4.465319778664265,
"grad_norm": 1.087573766708374,
"learning_rate": 5.3564993564993565e-06,
"loss": 0.3644,
"step": 17350
},
{
"epoch": 4.467893450006434,
"grad_norm": 1.2101587057113647,
"learning_rate": 5.330759330759331e-06,
"loss": 0.3456,
"step": 17360
},
{
"epoch": 4.470467121348603,
"grad_norm": 1.4573382139205933,
"learning_rate": 5.305019305019306e-06,
"loss": 0.3113,
"step": 17370
},
{
"epoch": 4.473040792690774,
"grad_norm": 0.8906600475311279,
"learning_rate": 5.27927927927928e-06,
"loss": 0.3345,
"step": 17380
},
{
"epoch": 4.475614464032943,
"grad_norm": 1.079119086265564,
"learning_rate": 5.253539253539254e-06,
"loss": 0.3668,
"step": 17390
},
{
"epoch": 4.478188135375112,
"grad_norm": 1.0174261331558228,
"learning_rate": 5.227799227799228e-06,
"loss": 0.2963,
"step": 17400
},
{
"epoch": 4.4807618067172825,
"grad_norm": 5.470022678375244,
"learning_rate": 5.202059202059202e-06,
"loss": 0.39,
"step": 17410
},
{
"epoch": 4.483335478059452,
"grad_norm": 3.2277028560638428,
"learning_rate": 5.176319176319176e-06,
"loss": 0.3342,
"step": 17420
},
{
"epoch": 4.485909149401621,
"grad_norm": 1.0484484434127808,
"learning_rate": 5.150579150579151e-06,
"loss": 0.3536,
"step": 17430
},
{
"epoch": 4.488482820743791,
"grad_norm": 3.3900933265686035,
"learning_rate": 5.1248391248391256e-06,
"loss": 0.3409,
"step": 17440
},
{
"epoch": 4.491056492085961,
"grad_norm": 1.2388783693313599,
"learning_rate": 5.0990990990991e-06,
"loss": 0.3389,
"step": 17450
},
{
"epoch": 4.49363016342813,
"grad_norm": 1.4199916124343872,
"learning_rate": 5.073359073359073e-06,
"loss": 0.3302,
"step": 17460
},
{
"epoch": 4.4962038347702995,
"grad_norm": 1.3507987260818481,
"learning_rate": 5.047619047619047e-06,
"loss": 0.367,
"step": 17470
},
{
"epoch": 4.49877750611247,
"grad_norm": 1.1383529901504517,
"learning_rate": 5.021879021879022e-06,
"loss": 0.3361,
"step": 17480
},
{
"epoch": 4.501351177454639,
"grad_norm": 1.3563300371170044,
"learning_rate": 4.996138996138996e-06,
"loss": 0.3596,
"step": 17490
},
{
"epoch": 4.503924848796808,
"grad_norm": 1.2805308103561401,
"learning_rate": 4.9703989703989705e-06,
"loss": 0.338,
"step": 17500
},
{
"epoch": 4.506498520138978,
"grad_norm": 1.186610460281372,
"learning_rate": 4.944658944658945e-06,
"loss": 0.4018,
"step": 17510
},
{
"epoch": 4.509072191481148,
"grad_norm": 1.3856215476989746,
"learning_rate": 4.918918918918919e-06,
"loss": 0.3387,
"step": 17520
},
{
"epoch": 4.511645862823317,
"grad_norm": 1.4942715167999268,
"learning_rate": 4.893178893178894e-06,
"loss": 0.3774,
"step": 17530
},
{
"epoch": 4.514219534165487,
"grad_norm": 1.3784438371658325,
"learning_rate": 4.867438867438868e-06,
"loss": 0.3103,
"step": 17540
},
{
"epoch": 4.516793205507657,
"grad_norm": 1.6347748041152954,
"learning_rate": 4.841698841698842e-06,
"loss": 0.3187,
"step": 17550
},
{
"epoch": 4.519366876849826,
"grad_norm": 3.5454697608947754,
"learning_rate": 4.815958815958816e-06,
"loss": 0.3355,
"step": 17560
},
{
"epoch": 4.521940548191996,
"grad_norm": 1.6713000535964966,
"learning_rate": 4.79021879021879e-06,
"loss": 0.3521,
"step": 17570
},
{
"epoch": 4.524514219534165,
"grad_norm": 1.2210255861282349,
"learning_rate": 4.764478764478765e-06,
"loss": 0.3532,
"step": 17580
},
{
"epoch": 4.527087890876335,
"grad_norm": 1.9054685831069946,
"learning_rate": 4.7387387387387395e-06,
"loss": 0.3597,
"step": 17590
},
{
"epoch": 4.529661562218505,
"grad_norm": 1.692408561706543,
"learning_rate": 4.712998712998713e-06,
"loss": 0.329,
"step": 17600
},
{
"epoch": 4.532235233560674,
"grad_norm": 1.5581741333007812,
"learning_rate": 4.687258687258687e-06,
"loss": 0.3693,
"step": 17610
},
{
"epoch": 4.534808904902844,
"grad_norm": 1.223633050918579,
"learning_rate": 4.661518661518661e-06,
"loss": 0.3536,
"step": 17620
},
{
"epoch": 4.5373825762450135,
"grad_norm": 1.719165325164795,
"learning_rate": 4.635778635778636e-06,
"loss": 0.3352,
"step": 17630
},
{
"epoch": 4.539956247587183,
"grad_norm": 1.2446945905685425,
"learning_rate": 4.61003861003861e-06,
"loss": 0.305,
"step": 17640
},
{
"epoch": 4.542529918929353,
"grad_norm": 1.1125750541687012,
"learning_rate": 4.5842985842985844e-06,
"loss": 0.3379,
"step": 17650
},
{
"epoch": 4.545103590271522,
"grad_norm": 1.183079719543457,
"learning_rate": 4.558558558558559e-06,
"loss": 0.4016,
"step": 17660
},
{
"epoch": 4.547677261613692,
"grad_norm": 1.196256399154663,
"learning_rate": 4.532818532818533e-06,
"loss": 0.3503,
"step": 17670
},
{
"epoch": 4.550250932955862,
"grad_norm": 2.58664608001709,
"learning_rate": 4.507078507078507e-06,
"loss": 0.3455,
"step": 17680
},
{
"epoch": 4.552824604298031,
"grad_norm": 1.1959803104400635,
"learning_rate": 4.481338481338482e-06,
"loss": 0.3916,
"step": 17690
},
{
"epoch": 4.555398275640201,
"grad_norm": 0.9203136563301086,
"learning_rate": 4.455598455598456e-06,
"loss": 0.3098,
"step": 17700
},
{
"epoch": 4.55797194698237,
"grad_norm": 0.8788485527038574,
"learning_rate": 4.42985842985843e-06,
"loss": 0.3033,
"step": 17710
},
{
"epoch": 4.56054561832454,
"grad_norm": 0.9711852669715881,
"learning_rate": 4.404118404118404e-06,
"loss": 0.3704,
"step": 17720
},
{
"epoch": 4.56311928966671,
"grad_norm": 0.947658360004425,
"learning_rate": 4.3783783783783785e-06,
"loss": 0.2983,
"step": 17730
},
{
"epoch": 4.565692961008879,
"grad_norm": 0.8453003168106079,
"learning_rate": 4.3526383526383535e-06,
"loss": 0.2935,
"step": 17740
},
{
"epoch": 4.568266632351049,
"grad_norm": 1.273728370666504,
"learning_rate": 4.326898326898327e-06,
"loss": 0.3509,
"step": 17750
},
{
"epoch": 4.570840303693219,
"grad_norm": 1.0643731355667114,
"learning_rate": 4.301158301158301e-06,
"loss": 0.3753,
"step": 17760
},
{
"epoch": 4.573413975035388,
"grad_norm": 1.152840256690979,
"learning_rate": 4.275418275418275e-06,
"loss": 0.3816,
"step": 17770
},
{
"epoch": 4.575987646377557,
"grad_norm": 1.0938299894332886,
"learning_rate": 4.249678249678249e-06,
"loss": 0.3666,
"step": 17780
},
{
"epoch": 4.5785613177197275,
"grad_norm": 1.659525752067566,
"learning_rate": 4.223938223938224e-06,
"loss": 0.3586,
"step": 17790
},
{
"epoch": 4.581134989061897,
"grad_norm": 1.3359277248382568,
"learning_rate": 4.1981981981981984e-06,
"loss": 0.3487,
"step": 17800
},
{
"epoch": 4.583708660404066,
"grad_norm": 1.0927700996398926,
"learning_rate": 4.1724581724581726e-06,
"loss": 0.3639,
"step": 17810
},
{
"epoch": 4.5862823317462365,
"grad_norm": 0.907636284828186,
"learning_rate": 4.146718146718147e-06,
"loss": 0.3088,
"step": 17820
},
{
"epoch": 4.588856003088406,
"grad_norm": 1.650233268737793,
"learning_rate": 4.120978120978121e-06,
"loss": 0.3611,
"step": 17830
},
{
"epoch": 4.591429674430575,
"grad_norm": 0.9985641837120056,
"learning_rate": 4.095238095238096e-06,
"loss": 0.3119,
"step": 17840
},
{
"epoch": 4.5940033457727445,
"grad_norm": 1.0793542861938477,
"learning_rate": 4.06949806949807e-06,
"loss": 0.3594,
"step": 17850
},
{
"epoch": 4.596577017114915,
"grad_norm": 1.0795053243637085,
"learning_rate": 4.043758043758044e-06,
"loss": 0.3464,
"step": 17860
},
{
"epoch": 4.599150688457084,
"grad_norm": 0.9072697758674622,
"learning_rate": 4.018018018018018e-06,
"loss": 0.3176,
"step": 17870
},
{
"epoch": 4.601724359799253,
"grad_norm": 2.6680212020874023,
"learning_rate": 3.9922779922779925e-06,
"loss": 0.337,
"step": 17880
},
{
"epoch": 4.604298031141424,
"grad_norm": 1.0229748487472534,
"learning_rate": 3.966537966537967e-06,
"loss": 0.3516,
"step": 17890
},
{
"epoch": 4.606871702483593,
"grad_norm": 3.6243858337402344,
"learning_rate": 3.940797940797941e-06,
"loss": 0.3331,
"step": 17900
},
{
"epoch": 4.609445373825762,
"grad_norm": 1.2276082038879395,
"learning_rate": 3.915057915057915e-06,
"loss": 0.3215,
"step": 17910
},
{
"epoch": 4.612019045167932,
"grad_norm": 1.2664270401000977,
"learning_rate": 3.889317889317889e-06,
"loss": 0.3782,
"step": 17920
},
{
"epoch": 4.614592716510102,
"grad_norm": 1.5127272605895996,
"learning_rate": 3.863577863577863e-06,
"loss": 0.3471,
"step": 17930
},
{
"epoch": 4.617166387852271,
"grad_norm": 1.5340853929519653,
"learning_rate": 3.837837837837837e-06,
"loss": 0.3593,
"step": 17940
},
{
"epoch": 4.619740059194441,
"grad_norm": 0.8411980867385864,
"learning_rate": 3.8120978120978124e-06,
"loss": 0.3846,
"step": 17950
},
{
"epoch": 4.622313730536611,
"grad_norm": 3.0642356872558594,
"learning_rate": 3.7863577863577866e-06,
"loss": 0.372,
"step": 17960
},
{
"epoch": 4.62488740187878,
"grad_norm": 2.399491786956787,
"learning_rate": 3.7606177606177607e-06,
"loss": 0.3127,
"step": 17970
},
{
"epoch": 4.62746107322095,
"grad_norm": 1.3899736404418945,
"learning_rate": 3.734877734877735e-06,
"loss": 0.3518,
"step": 17980
},
{
"epoch": 4.630034744563119,
"grad_norm": 2.5960450172424316,
"learning_rate": 3.709137709137709e-06,
"loss": 0.3246,
"step": 17990
},
{
"epoch": 4.632608415905289,
"grad_norm": 1.2150206565856934,
"learning_rate": 3.683397683397684e-06,
"loss": 0.3655,
"step": 18000
},
{
"epoch": 4.632608415905289,
"eval_loss": 0.8564029932022095,
"eval_runtime": 395.6976,
"eval_samples_per_second": 49.093,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.00035587121068334865,
"step": 18000
},
{
"epoch": 4.6351820872474585,
"grad_norm": 1.110227108001709,
"learning_rate": 3.6576576576576577e-06,
"loss": 0.3679,
"step": 18010
},
{
"epoch": 4.637755758589628,
"grad_norm": 1.2694969177246094,
"learning_rate": 3.6344916344916345e-06,
"loss": 0.4168,
"step": 18020
},
{
"epoch": 4.640329429931798,
"grad_norm": 1.5729743242263794,
"learning_rate": 3.6087516087516086e-06,
"loss": 0.3952,
"step": 18030
},
{
"epoch": 4.642903101273967,
"grad_norm": 1.1754688024520874,
"learning_rate": 3.5830115830115836e-06,
"loss": 0.3475,
"step": 18040
},
{
"epoch": 4.645476772616137,
"grad_norm": 1.3123219013214111,
"learning_rate": 3.5572715572715578e-06,
"loss": 0.3589,
"step": 18050
},
{
"epoch": 4.648050443958306,
"grad_norm": 3.629625082015991,
"learning_rate": 3.5315315315315315e-06,
"loss": 0.3261,
"step": 18060
},
{
"epoch": 4.650624115300476,
"grad_norm": 1.0868263244628906,
"learning_rate": 3.5057915057915057e-06,
"loss": 0.3427,
"step": 18070
},
{
"epoch": 4.653197786642646,
"grad_norm": 1.1146104335784912,
"learning_rate": 3.48005148005148e-06,
"loss": 0.3577,
"step": 18080
},
{
"epoch": 4.655771457984815,
"grad_norm": 1.4053925275802612,
"learning_rate": 3.454311454311455e-06,
"loss": 0.3473,
"step": 18090
},
{
"epoch": 4.658345129326985,
"grad_norm": 1.5852116346359253,
"learning_rate": 3.428571428571429e-06,
"loss": 0.3616,
"step": 18100
},
{
"epoch": 4.660918800669155,
"grad_norm": 1.1257933378219604,
"learning_rate": 3.402831402831403e-06,
"loss": 0.3035,
"step": 18110
},
{
"epoch": 4.663492472011324,
"grad_norm": 1.2144465446472168,
"learning_rate": 3.3770913770913773e-06,
"loss": 0.3089,
"step": 18120
},
{
"epoch": 4.666066143353493,
"grad_norm": 1.458746314048767,
"learning_rate": 3.3513513513513514e-06,
"loss": 0.3289,
"step": 18130
},
{
"epoch": 4.668639814695664,
"grad_norm": 1.8117784261703491,
"learning_rate": 3.325611325611326e-06,
"loss": 0.3345,
"step": 18140
},
{
"epoch": 4.671213486037833,
"grad_norm": 1.3290542364120483,
"learning_rate": 3.2998712998713e-06,
"loss": 0.2998,
"step": 18150
},
{
"epoch": 4.673787157380002,
"grad_norm": 1.1426622867584229,
"learning_rate": 3.2741312741312743e-06,
"loss": 0.364,
"step": 18160
},
{
"epoch": 4.6763608287221725,
"grad_norm": 1.2317893505096436,
"learning_rate": 3.2483912483912484e-06,
"loss": 0.2657,
"step": 18170
},
{
"epoch": 4.678934500064342,
"grad_norm": 1.211256980895996,
"learning_rate": 3.2226512226512226e-06,
"loss": 0.4204,
"step": 18180
},
{
"epoch": 4.681508171406511,
"grad_norm": 1.5022300481796265,
"learning_rate": 3.1969111969111968e-06,
"loss": 0.3907,
"step": 18190
},
{
"epoch": 4.684081842748681,
"grad_norm": 1.2132724523544312,
"learning_rate": 3.1711711711711718e-06,
"loss": 0.3386,
"step": 18200
},
{
"epoch": 4.686655514090851,
"grad_norm": 1.2516412734985352,
"learning_rate": 3.1454311454311455e-06,
"loss": 0.3616,
"step": 18210
},
{
"epoch": 4.68922918543302,
"grad_norm": 1.1552441120147705,
"learning_rate": 3.1196911196911196e-06,
"loss": 0.3508,
"step": 18220
},
{
"epoch": 4.6918028567751895,
"grad_norm": 3.639354705810547,
"learning_rate": 3.093951093951094e-06,
"loss": 0.3425,
"step": 18230
},
{
"epoch": 4.69437652811736,
"grad_norm": 3.8008406162261963,
"learning_rate": 3.0682110682110684e-06,
"loss": 0.3468,
"step": 18240
},
{
"epoch": 4.696950199459529,
"grad_norm": 1.2858436107635498,
"learning_rate": 3.0424710424710425e-06,
"loss": 0.3806,
"step": 18250
},
{
"epoch": 4.699523870801698,
"grad_norm": 2.4241316318511963,
"learning_rate": 3.016731016731017e-06,
"loss": 0.3471,
"step": 18260
},
{
"epoch": 4.702097542143868,
"grad_norm": 1.0600461959838867,
"learning_rate": 2.9909909909909912e-06,
"loss": 0.301,
"step": 18270
},
{
"epoch": 4.704671213486038,
"grad_norm": 1.1587661504745483,
"learning_rate": 2.9652509652509654e-06,
"loss": 0.3476,
"step": 18280
},
{
"epoch": 4.707244884828207,
"grad_norm": 2.059361696243286,
"learning_rate": 2.9395109395109395e-06,
"loss": 0.3625,
"step": 18290
},
{
"epoch": 4.709818556170377,
"grad_norm": 1.0103809833526611,
"learning_rate": 2.9137709137709137e-06,
"loss": 0.3285,
"step": 18300
},
{
"epoch": 4.712392227512547,
"grad_norm": 3.1036524772644043,
"learning_rate": 2.8880308880308883e-06,
"loss": 0.4137,
"step": 18310
},
{
"epoch": 4.714965898854716,
"grad_norm": 1.7626713514328003,
"learning_rate": 2.8622908622908624e-06,
"loss": 0.3824,
"step": 18320
},
{
"epoch": 4.717539570196886,
"grad_norm": 1.179608941078186,
"learning_rate": 2.8365508365508366e-06,
"loss": 0.3257,
"step": 18330
},
{
"epoch": 4.720113241539055,
"grad_norm": 1.0469659566879272,
"learning_rate": 2.810810810810811e-06,
"loss": 0.356,
"step": 18340
},
{
"epoch": 4.722686912881225,
"grad_norm": 1.0116140842437744,
"learning_rate": 2.7850707850707853e-06,
"loss": 0.3352,
"step": 18350
},
{
"epoch": 4.725260584223395,
"grad_norm": 1.530369520187378,
"learning_rate": 2.7593307593307595e-06,
"loss": 0.4061,
"step": 18360
},
{
"epoch": 4.727834255565564,
"grad_norm": 1.096043586730957,
"learning_rate": 2.7335907335907336e-06,
"loss": 0.3633,
"step": 18370
},
{
"epoch": 4.730407926907734,
"grad_norm": 2.9259002208709717,
"learning_rate": 2.7078507078507078e-06,
"loss": 0.3762,
"step": 18380
},
{
"epoch": 4.7329815982499035,
"grad_norm": 2.655984878540039,
"learning_rate": 2.6821106821106823e-06,
"loss": 0.331,
"step": 18390
},
{
"epoch": 4.735555269592073,
"grad_norm": 1.2596089839935303,
"learning_rate": 2.6563706563706565e-06,
"loss": 0.314,
"step": 18400
},
{
"epoch": 4.738128940934243,
"grad_norm": 1.149182915687561,
"learning_rate": 2.6306306306306306e-06,
"loss": 0.3377,
"step": 18410
},
{
"epoch": 4.740702612276412,
"grad_norm": 0.9628931879997253,
"learning_rate": 2.6048906048906052e-06,
"loss": 0.404,
"step": 18420
},
{
"epoch": 4.743276283618582,
"grad_norm": 1.304672360420227,
"learning_rate": 2.5791505791505794e-06,
"loss": 0.3934,
"step": 18430
},
{
"epoch": 4.745849954960751,
"grad_norm": 1.1351431608200073,
"learning_rate": 2.5534105534105535e-06,
"loss": 0.3549,
"step": 18440
},
{
"epoch": 4.748423626302921,
"grad_norm": 3.7342593669891357,
"learning_rate": 2.5276705276705277e-06,
"loss": 0.3391,
"step": 18450
},
{
"epoch": 4.750997297645091,
"grad_norm": 1.1187039613723755,
"learning_rate": 2.501930501930502e-06,
"loss": 0.341,
"step": 18460
},
{
"epoch": 4.75357096898726,
"grad_norm": 1.6478817462921143,
"learning_rate": 2.4761904761904764e-06,
"loss": 0.3171,
"step": 18470
},
{
"epoch": 4.75614464032943,
"grad_norm": 1.0323374271392822,
"learning_rate": 2.4504504504504506e-06,
"loss": 0.2878,
"step": 18480
},
{
"epoch": 4.7587183116716,
"grad_norm": 1.34542977809906,
"learning_rate": 2.424710424710425e-06,
"loss": 0.33,
"step": 18490
},
{
"epoch": 4.761291983013769,
"grad_norm": 1.1959524154663086,
"learning_rate": 2.3989703989703993e-06,
"loss": 0.3031,
"step": 18500
},
{
"epoch": 4.763865654355939,
"grad_norm": 1.4889193773269653,
"learning_rate": 2.3732303732303734e-06,
"loss": 0.386,
"step": 18510
},
{
"epoch": 4.766439325698109,
"grad_norm": 1.1365255117416382,
"learning_rate": 2.3474903474903476e-06,
"loss": 0.3173,
"step": 18520
},
{
"epoch": 4.769012997040278,
"grad_norm": 1.1125775575637817,
"learning_rate": 2.3217503217503217e-06,
"loss": 0.37,
"step": 18530
},
{
"epoch": 4.771586668382447,
"grad_norm": 1.230258584022522,
"learning_rate": 2.296010296010296e-06,
"loss": 0.3115,
"step": 18540
},
{
"epoch": 4.7741603397246175,
"grad_norm": 1.1539350748062134,
"learning_rate": 2.2702702702702705e-06,
"loss": 0.3639,
"step": 18550
},
{
"epoch": 4.776734011066787,
"grad_norm": 1.2653287649154663,
"learning_rate": 2.2445302445302446e-06,
"loss": 0.3541,
"step": 18560
},
{
"epoch": 4.779307682408956,
"grad_norm": 0.9209233522415161,
"learning_rate": 2.218790218790219e-06,
"loss": 0.3348,
"step": 18570
},
{
"epoch": 4.781881353751126,
"grad_norm": 0.895807683467865,
"learning_rate": 2.1930501930501933e-06,
"loss": 0.3351,
"step": 18580
},
{
"epoch": 4.784455025093296,
"grad_norm": 0.8402043581008911,
"learning_rate": 2.167310167310167e-06,
"loss": 0.3198,
"step": 18590
},
{
"epoch": 4.787028696435465,
"grad_norm": 2.4166226387023926,
"learning_rate": 2.1415701415701416e-06,
"loss": 0.3464,
"step": 18600
},
{
"epoch": 4.7896023677776345,
"grad_norm": 2.75046706199646,
"learning_rate": 2.115830115830116e-06,
"loss": 0.3046,
"step": 18610
},
{
"epoch": 4.792176039119805,
"grad_norm": 1.4120715856552124,
"learning_rate": 2.0900900900900904e-06,
"loss": 0.387,
"step": 18620
},
{
"epoch": 4.794749710461974,
"grad_norm": 1.039160132408142,
"learning_rate": 2.0643500643500645e-06,
"loss": 0.3805,
"step": 18630
},
{
"epoch": 4.797323381804143,
"grad_norm": 3.3216381072998047,
"learning_rate": 2.0386100386100387e-06,
"loss": 0.3687,
"step": 18640
},
{
"epoch": 4.799897053146314,
"grad_norm": 1.0428948402404785,
"learning_rate": 2.0128700128700133e-06,
"loss": 0.3146,
"step": 18650
},
{
"epoch": 4.802470724488483,
"grad_norm": 1.2070870399475098,
"learning_rate": 1.9871299871299874e-06,
"loss": 0.3448,
"step": 18660
},
{
"epoch": 4.805044395830652,
"grad_norm": 0.9999447464942932,
"learning_rate": 1.961389961389961e-06,
"loss": 0.3139,
"step": 18670
},
{
"epoch": 4.807618067172822,
"grad_norm": 0.7918708920478821,
"learning_rate": 1.9356499356499357e-06,
"loss": 0.355,
"step": 18680
},
{
"epoch": 4.810191738514992,
"grad_norm": 1.158919334411621,
"learning_rate": 1.90990990990991e-06,
"loss": 0.3108,
"step": 18690
},
{
"epoch": 4.812765409857161,
"grad_norm": 1.4351342916488647,
"learning_rate": 1.8841698841698844e-06,
"loss": 0.3367,
"step": 18700
},
{
"epoch": 4.815339081199331,
"grad_norm": 1.1386419534683228,
"learning_rate": 1.8584298584298586e-06,
"loss": 0.3189,
"step": 18710
},
{
"epoch": 4.817912752541501,
"grad_norm": 1.5254247188568115,
"learning_rate": 1.8326898326898327e-06,
"loss": 0.3336,
"step": 18720
},
{
"epoch": 4.82048642388367,
"grad_norm": 1.0063539743423462,
"learning_rate": 1.8069498069498071e-06,
"loss": 0.3185,
"step": 18730
},
{
"epoch": 4.82306009522584,
"grad_norm": 1.1583902835845947,
"learning_rate": 1.7812097812097813e-06,
"loss": 0.3918,
"step": 18740
},
{
"epoch": 4.825633766568009,
"grad_norm": 1.1044045686721802,
"learning_rate": 1.7554697554697556e-06,
"loss": 0.2989,
"step": 18750
},
{
"epoch": 4.828207437910179,
"grad_norm": 1.114070177078247,
"learning_rate": 1.7297297297297298e-06,
"loss": 0.3586,
"step": 18760
},
{
"epoch": 4.8307811092523485,
"grad_norm": 1.4722037315368652,
"learning_rate": 1.703989703989704e-06,
"loss": 0.364,
"step": 18770
},
{
"epoch": 4.833354780594518,
"grad_norm": 1.1275004148483276,
"learning_rate": 1.6782496782496785e-06,
"loss": 0.3595,
"step": 18780
},
{
"epoch": 4.835928451936688,
"grad_norm": 1.0008904933929443,
"learning_rate": 1.6525096525096527e-06,
"loss": 0.3347,
"step": 18790
},
{
"epoch": 4.838502123278857,
"grad_norm": 0.9728559851646423,
"learning_rate": 1.6267696267696266e-06,
"loss": 0.3359,
"step": 18800
},
{
"epoch": 4.841075794621027,
"grad_norm": 1.343985915184021,
"learning_rate": 1.6010296010296012e-06,
"loss": 0.3496,
"step": 18810
},
{
"epoch": 4.843649465963196,
"grad_norm": 0.8457748889923096,
"learning_rate": 1.5752895752895753e-06,
"loss": 0.3495,
"step": 18820
},
{
"epoch": 4.846223137305366,
"grad_norm": 4.120772838592529,
"learning_rate": 1.5495495495495495e-06,
"loss": 0.3289,
"step": 18830
},
{
"epoch": 4.848796808647536,
"grad_norm": 1.3416566848754883,
"learning_rate": 1.5238095238095238e-06,
"loss": 0.341,
"step": 18840
},
{
"epoch": 4.851370479989705,
"grad_norm": 2.817333936691284,
"learning_rate": 1.4980694980694982e-06,
"loss": 0.3213,
"step": 18850
},
{
"epoch": 4.853944151331875,
"grad_norm": 1.17255699634552,
"learning_rate": 1.4723294723294726e-06,
"loss": 0.3291,
"step": 18860
},
{
"epoch": 4.856517822674045,
"grad_norm": 2.559112787246704,
"learning_rate": 1.4465894465894465e-06,
"loss": 0.2838,
"step": 18870
},
{
"epoch": 4.859091494016214,
"grad_norm": 1.171962022781372,
"learning_rate": 1.4208494208494209e-06,
"loss": 0.3366,
"step": 18880
},
{
"epoch": 4.861665165358383,
"grad_norm": 1.125060796737671,
"learning_rate": 1.3951093951093952e-06,
"loss": 0.3411,
"step": 18890
},
{
"epoch": 4.864238836700554,
"grad_norm": 1.2087688446044922,
"learning_rate": 1.3693693693693696e-06,
"loss": 0.2909,
"step": 18900
},
{
"epoch": 4.866812508042723,
"grad_norm": 1.3647404909133911,
"learning_rate": 1.3436293436293435e-06,
"loss": 0.3347,
"step": 18910
},
{
"epoch": 4.869386179384892,
"grad_norm": 1.1150912046432495,
"learning_rate": 1.317889317889318e-06,
"loss": 0.3467,
"step": 18920
},
{
"epoch": 4.8719598507270625,
"grad_norm": 1.2216849327087402,
"learning_rate": 1.2921492921492923e-06,
"loss": 0.3459,
"step": 18930
},
{
"epoch": 4.874533522069232,
"grad_norm": 1.0959711074829102,
"learning_rate": 1.2664092664092664e-06,
"loss": 0.3167,
"step": 18940
},
{
"epoch": 4.877107193411401,
"grad_norm": 1.3392401933670044,
"learning_rate": 1.2406692406692406e-06,
"loss": 0.2993,
"step": 18950
},
{
"epoch": 4.8796808647535705,
"grad_norm": 1.1086993217468262,
"learning_rate": 1.214929214929215e-06,
"loss": 0.3165,
"step": 18960
},
{
"epoch": 4.882254536095741,
"grad_norm": 1.0389692783355713,
"learning_rate": 1.1891891891891893e-06,
"loss": 0.349,
"step": 18970
},
{
"epoch": 4.88482820743791,
"grad_norm": 1.053175449371338,
"learning_rate": 1.1634491634491635e-06,
"loss": 0.3713,
"step": 18980
},
{
"epoch": 4.8874018787800795,
"grad_norm": 1.4085311889648438,
"learning_rate": 1.1377091377091378e-06,
"loss": 0.3424,
"step": 18990
},
{
"epoch": 4.88997555012225,
"grad_norm": 1.0388787984848022,
"learning_rate": 1.111969111969112e-06,
"loss": 0.3223,
"step": 19000
},
{
"epoch": 4.88997555012225,
"eval_loss": 0.8570228219032288,
"eval_runtime": 395.7978,
"eval_samples_per_second": 49.081,
"eval_steps_per_second": 2.456,
"eval_token_accuracy": 0.00035359363493497523,
"step": 19000
},
{
"epoch": 4.892549221464419,
"grad_norm": 1.3370487689971924,
"learning_rate": 1.0862290862290863e-06,
"loss": 0.3524,
"step": 19010
},
{
"epoch": 4.895122892806588,
"grad_norm": 1.1266289949417114,
"learning_rate": 1.0604890604890605e-06,
"loss": 0.3193,
"step": 19020
},
{
"epoch": 4.897696564148758,
"grad_norm": 1.6796464920043945,
"learning_rate": 1.0347490347490348e-06,
"loss": 0.3117,
"step": 19030
},
{
"epoch": 4.900270235490928,
"grad_norm": 1.1467808485031128,
"learning_rate": 1.009009009009009e-06,
"loss": 0.366,
"step": 19040
},
{
"epoch": 4.902843906833097,
"grad_norm": 0.8519312739372253,
"learning_rate": 9.832689832689834e-07,
"loss": 0.2902,
"step": 19050
},
{
"epoch": 4.905417578175267,
"grad_norm": 1.30669105052948,
"learning_rate": 9.575289575289575e-07,
"loss": 0.3016,
"step": 19060
},
{
"epoch": 4.907991249517437,
"grad_norm": 1.3413194417953491,
"learning_rate": 9.317889317889319e-07,
"loss": 0.3301,
"step": 19070
},
{
"epoch": 4.910564920859606,
"grad_norm": 1.2429314851760864,
"learning_rate": 9.06048906048906e-07,
"loss": 0.328,
"step": 19080
},
{
"epoch": 4.913138592201776,
"grad_norm": 1.04901123046875,
"learning_rate": 8.803088803088803e-07,
"loss": 0.3483,
"step": 19090
},
{
"epoch": 4.915712263543945,
"grad_norm": 0.9599467515945435,
"learning_rate": 8.545688545688547e-07,
"loss": 0.3241,
"step": 19100
},
{
"epoch": 4.918285934886115,
"grad_norm": 3.971909284591675,
"learning_rate": 8.288288288288289e-07,
"loss": 0.3647,
"step": 19110
},
{
"epoch": 4.9208596062282846,
"grad_norm": 1.1139452457427979,
"learning_rate": 8.030888030888032e-07,
"loss": 0.3326,
"step": 19120
},
{
"epoch": 4.923433277570454,
"grad_norm": 1.015626311302185,
"learning_rate": 7.773487773487774e-07,
"loss": 0.3422,
"step": 19130
},
{
"epoch": 4.926006948912624,
"grad_norm": 1.0744688510894775,
"learning_rate": 7.516087516087516e-07,
"loss": 0.3471,
"step": 19140
},
{
"epoch": 4.9285806202547935,
"grad_norm": 1.1969647407531738,
"learning_rate": 7.258687258687259e-07,
"loss": 0.3139,
"step": 19150
},
{
"epoch": 4.931154291596963,
"grad_norm": 0.8164909482002258,
"learning_rate": 7.001287001287001e-07,
"loss": 0.3355,
"step": 19160
},
{
"epoch": 4.933727962939132,
"grad_norm": 1.189275860786438,
"learning_rate": 6.743886743886745e-07,
"loss": 0.3782,
"step": 19170
},
{
"epoch": 4.936301634281302,
"grad_norm": 1.551167368888855,
"learning_rate": 6.486486486486486e-07,
"loss": 0.3773,
"step": 19180
},
{
"epoch": 4.938875305623472,
"grad_norm": 0.9430525898933411,
"learning_rate": 6.22908622908623e-07,
"loss": 0.3332,
"step": 19190
},
{
"epoch": 4.941448976965641,
"grad_norm": 1.2511941194534302,
"learning_rate": 5.971685971685971e-07,
"loss": 0.3804,
"step": 19200
},
{
"epoch": 4.944022648307811,
"grad_norm": 1.0374319553375244,
"learning_rate": 5.714285714285715e-07,
"loss": 0.3166,
"step": 19210
},
{
"epoch": 4.946596319649981,
"grad_norm": 0.9965717792510986,
"learning_rate": 5.456885456885458e-07,
"loss": 0.3132,
"step": 19220
},
{
"epoch": 4.94916999099215,
"grad_norm": 1.1020451784133911,
"learning_rate": 5.1994851994852e-07,
"loss": 0.3717,
"step": 19230
},
{
"epoch": 4.95174366233432,
"grad_norm": 0.7918252944946289,
"learning_rate": 4.942084942084943e-07,
"loss": 0.2936,
"step": 19240
},
{
"epoch": 4.95431733367649,
"grad_norm": 1.4422928094863892,
"learning_rate": 4.6846846846846847e-07,
"loss": 0.3374,
"step": 19250
},
{
"epoch": 4.956891005018659,
"grad_norm": 0.949720025062561,
"learning_rate": 4.427284427284428e-07,
"loss": 0.3771,
"step": 19260
},
{
"epoch": 4.959464676360829,
"grad_norm": 1.0243984460830688,
"learning_rate": 4.16988416988417e-07,
"loss": 0.3283,
"step": 19270
},
{
"epoch": 4.962038347702999,
"grad_norm": 1.0813188552856445,
"learning_rate": 3.912483912483913e-07,
"loss": 0.3885,
"step": 19280
},
{
"epoch": 4.964612019045168,
"grad_norm": 4.7011799812316895,
"learning_rate": 3.6550836550836556e-07,
"loss": 0.3065,
"step": 19290
},
{
"epoch": 4.967185690387337,
"grad_norm": 5.996508598327637,
"learning_rate": 3.397683397683398e-07,
"loss": 0.3949,
"step": 19300
},
{
"epoch": 4.9697593617295075,
"grad_norm": 1.202574610710144,
"learning_rate": 3.1402831402831407e-07,
"loss": 0.3265,
"step": 19310
},
{
"epoch": 4.972333033071677,
"grad_norm": 0.9816371202468872,
"learning_rate": 2.8828828828828833e-07,
"loss": 0.3646,
"step": 19320
},
{
"epoch": 4.974906704413846,
"grad_norm": 1.1760458946228027,
"learning_rate": 2.6254826254826254e-07,
"loss": 0.3221,
"step": 19330
},
{
"epoch": 4.977480375756016,
"grad_norm": 1.4523731470108032,
"learning_rate": 2.3680823680823682e-07,
"loss": 0.3247,
"step": 19340
},
{
"epoch": 4.980054047098186,
"grad_norm": 1.1711961030960083,
"learning_rate": 2.1106821106821108e-07,
"loss": 0.3514,
"step": 19350
},
{
"epoch": 4.982627718440355,
"grad_norm": 0.8875769972801208,
"learning_rate": 1.8532818532818534e-07,
"loss": 0.3135,
"step": 19360
},
{
"epoch": 4.9852013897825245,
"grad_norm": 1.7097541093826294,
"learning_rate": 1.595881595881596e-07,
"loss": 0.3252,
"step": 19370
},
{
"epoch": 4.987775061124695,
"grad_norm": 3.204772472381592,
"learning_rate": 1.3384813384813385e-07,
"loss": 0.2987,
"step": 19380
},
{
"epoch": 4.990348732466864,
"grad_norm": 2.616095542907715,
"learning_rate": 1.0810810810810811e-07,
"loss": 0.3303,
"step": 19390
},
{
"epoch": 4.992922403809033,
"grad_norm": 1.0616631507873535,
"learning_rate": 8.236808236808237e-08,
"loss": 0.3565,
"step": 19400
},
{
"epoch": 4.995496075151204,
"grad_norm": 0.8763009905815125,
"learning_rate": 5.662805662805663e-08,
"loss": 0.3151,
"step": 19410
},
{
"epoch": 4.998069746493373,
"grad_norm": 1.2131026983261108,
"learning_rate": 3.088803088803089e-08,
"loss": 0.3425,
"step": 19420
}
],
"logging_steps": 10,
"max_steps": 19425,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.4362014238809784e+18,
"train_batch_size": 10,
"trial_name": null,
"trial_params": null
}