| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 1278, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.023508668821627974, |
| "grad_norm": 84.37700653076172, |
| "learning_rate": 7.031250000000001e-06, |
| "loss": 115.85, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.04701733764325595, |
| "grad_norm": 106.4520034790039, |
| "learning_rate": 1.484375e-05, |
| "loss": 116.5781, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.07052600646488393, |
| "grad_norm": 130.89254760742188, |
| "learning_rate": 2.2656250000000002e-05, |
| "loss": 103.3281, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.0940346752865119, |
| "grad_norm": 37.089256286621094, |
| "learning_rate": 3.0468750000000002e-05, |
| "loss": 60.0406, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.11754334410813988, |
| "grad_norm": 44.051109313964844, |
| "learning_rate": 3.828125e-05, |
| "loss": 39.75, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.14105201292976785, |
| "grad_norm": 43.561981201171875, |
| "learning_rate": 4.609375e-05, |
| "loss": 24.7984, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.16456068175139582, |
| "grad_norm": 34.622520446777344, |
| "learning_rate": 5.3906250000000006e-05, |
| "loss": 11.9723, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.1880693505730238, |
| "grad_norm": 22.26849937438965, |
| "learning_rate": 6.171875e-05, |
| "loss": 5.4578, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.2115780193946518, |
| "grad_norm": 9.059814453125, |
| "learning_rate": 6.953125e-05, |
| "loss": 3.992, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.23508668821627976, |
| "grad_norm": 4.332883834838867, |
| "learning_rate": 7.734375e-05, |
| "loss": 3.1058, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.2585953570379077, |
| "grad_norm": 6.844908237457275, |
| "learning_rate": 8.515625e-05, |
| "loss": 2.6654, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.2821040258595357, |
| "grad_norm": 4.539117813110352, |
| "learning_rate": 9.296875e-05, |
| "loss": 2.422, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.3056126946811637, |
| "grad_norm": 3.098025321960449, |
| "learning_rate": 9.999981342914437e-05, |
| "loss": 2.1623, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.32912136350279164, |
| "grad_norm": 1.3452321290969849, |
| "learning_rate": 9.997742661115932e-05, |
| "loss": 2.1429, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.35263003232441964, |
| "grad_norm": 0.37226417660713196, |
| "learning_rate": 9.991774476447404e-05, |
| "loss": 2.1347, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.3761387011460476, |
| "grad_norm": 3.523218870162964, |
| "learning_rate": 9.982081242591919e-05, |
| "loss": 2.0512, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.3996473699676756, |
| "grad_norm": 0.33414289355278015, |
| "learning_rate": 9.968670193003843e-05, |
| "loss": 2.047, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.4231560387893036, |
| "grad_norm": 0.24106919765472412, |
| "learning_rate": 9.951551335510978e-05, |
| "loss": 2.1046, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.4466647076109315, |
| "grad_norm": 0.328752726316452, |
| "learning_rate": 9.930737444846331e-05, |
| "loss": 2.1397, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.4701733764325595, |
| "grad_norm": 1.1409285068511963, |
| "learning_rate": 9.906244053115143e-05, |
| "loss": 2.1377, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.49368204525418746, |
| "grad_norm": 0.0690290704369545, |
| "learning_rate": 9.87808943820424e-05, |
| "loss": 2.0992, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.5171907140758154, |
| "grad_norm": 0.20527280867099762, |
| "learning_rate": 9.846294610142398e-05, |
| "loss": 2.1339, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.5406993828974435, |
| "grad_norm": 0.3991909623146057, |
| "learning_rate": 9.810883295421864e-05, |
| "loss": 2.1176, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.5642080517190714, |
| "grad_norm": 0.08950258791446686, |
| "learning_rate": 9.771881919292765e-05, |
| "loss": 2.0895, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.5877167205406993, |
| "grad_norm": 0.17848969995975494, |
| "learning_rate": 9.729319586043591e-05, |
| "loss": 2.0877, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.6112253893623274, |
| "grad_norm": 0.30160897970199585, |
| "learning_rate": 9.683228057282483e-05, |
| "loss": 2.0648, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.6347340581839553, |
| "grad_norm": 0.11883804202079773, |
| "learning_rate": 9.63364172823554e-05, |
| "loss": 2.0591, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.6582427270055833, |
| "grad_norm": 0.1619461327791214, |
| "learning_rate": 9.580597602079802e-05, |
| "loss": 2.1386, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.6817513958272113, |
| "grad_norm": 0.11473017930984497, |
| "learning_rate": 9.524135262330098e-05, |
| "loss": 2.1028, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.7052600646488393, |
| "grad_norm": 0.059143248945474625, |
| "learning_rate": 9.464296843300342e-05, |
| "loss": 2.0881, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.7287687334704672, |
| "grad_norm": 0.1624162495136261, |
| "learning_rate": 9.401126998661328e-05, |
| "loss": 2.0971, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.7522774022920952, |
| "grad_norm": 0.15460653603076935, |
| "learning_rate": 9.334672868118491e-05, |
| "loss": 2.1154, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.7757860711137232, |
| "grad_norm": 0.4213317334651947, |
| "learning_rate": 9.26498404223449e-05, |
| "loss": 2.1113, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.7992947399353512, |
| "grad_norm": 0.14529550075531006, |
| "learning_rate": 9.192112525422868e-05, |
| "loss": 2.0867, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.8228034087569791, |
| "grad_norm": 0.06652400642633438, |
| "learning_rate": 9.116112697140418e-05, |
| "loss": 2.0807, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.8463120775786072, |
| "grad_norm": 0.12574820220470428, |
| "learning_rate": 9.037041271307188e-05, |
| "loss": 2.1711, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.8698207464002351, |
| "grad_norm": 0.10869150608778, |
| "learning_rate": 8.954957253984426e-05, |
| "loss": 2.1252, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.893329415221863, |
| "grad_norm": 0.143524169921875, |
| "learning_rate": 8.869921899342056e-05, |
| "loss": 2.0468, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.916838084043491, |
| "grad_norm": 0.27682727575302124, |
| "learning_rate": 8.781998663948513e-05, |
| "loss": 2.1023, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.940346752865119, |
| "grad_norm": 0.08671136200428009, |
| "learning_rate": 8.691253159417074e-05, |
| "loss": 2.0991, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.963855421686747, |
| "grad_norm": 0.14743109047412872, |
| "learning_rate": 8.597753103444016e-05, |
| "loss": 2.0858, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.9873640905083749, |
| "grad_norm": 0.14369799196720123, |
| "learning_rate": 8.501568269275126e-05, |
| "loss": 2.1057, |
| "step": 420 |
| }, |
| { |
| "epoch": 1.0094034675286512, |
| "grad_norm": 0.09220755100250244, |
| "learning_rate": 8.40277043363831e-05, |
| "loss": 1.9405, |
| "step": 430 |
| }, |
| { |
| "epoch": 1.0329121363502791, |
| "grad_norm": 0.25697803497314453, |
| "learning_rate": 8.301433323181076e-05, |
| "loss": 2.1221, |
| "step": 440 |
| }, |
| { |
| "epoch": 1.056420805171907, |
| "grad_norm": 0.1321459412574768, |
| "learning_rate": 8.19763255945298e-05, |
| "loss": 2.1576, |
| "step": 450 |
| }, |
| { |
| "epoch": 1.079929473993535, |
| "grad_norm": 0.11310122162103653, |
| "learning_rate": 8.091445602473972e-05, |
| "loss": 2.0483, |
| "step": 460 |
| }, |
| { |
| "epoch": 1.1034381428151632, |
| "grad_norm": 0.08170254528522491, |
| "learning_rate": 7.982951692930829e-05, |
| "loss": 2.1367, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.1269468116367911, |
| "grad_norm": 0.022492246702313423, |
| "learning_rate": 7.87223179304479e-05, |
| "loss": 2.1435, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.150455480458419, |
| "grad_norm": 0.015893638134002686, |
| "learning_rate": 7.759368526154509e-05, |
| "loss": 2.0666, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.173964149280047, |
| "grad_norm": 0.053727954626083374, |
| "learning_rate": 7.644446115059425e-05, |
| "loss": 2.0886, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.197472818101675, |
| "grad_norm": 0.12717482447624207, |
| "learning_rate": 7.527550319169546e-05, |
| "loss": 2.1418, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.2209814869233029, |
| "grad_norm": 0.030752327293157578, |
| "learning_rate": 7.408768370508576e-05, |
| "loss": 2.0688, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.244490155744931, |
| "grad_norm": 0.07915918529033661, |
| "learning_rate": 7.288188908618102e-05, |
| "loss": 2.0857, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.267998824566559, |
| "grad_norm": 0.09853250533342361, |
| "learning_rate": 7.165901914411435e-05, |
| "loss": 2.0392, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.291507493388187, |
| "grad_norm": 0.21571961045265198, |
| "learning_rate": 7.041998643026511e-05, |
| "loss": 2.0921, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.3150161622098149, |
| "grad_norm": 0.10290564596652985, |
| "learning_rate": 6.916571555727852e-05, |
| "loss": 2.0686, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.3385248310314428, |
| "grad_norm": 0.08689925819635391, |
| "learning_rate": 6.789714250908533e-05, |
| "loss": 2.0845, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.3620334998530708, |
| "grad_norm": 0.1267048567533493, |
| "learning_rate": 6.661521394243533e-05, |
| "loss": 2.1465, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.3855421686746987, |
| "grad_norm": 0.18292003870010376, |
| "learning_rate": 6.532088648046677e-05, |
| "loss": 2.067, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.4090508374963266, |
| "grad_norm": 0.051083244383335114, |
| "learning_rate": 6.401512599883818e-05, |
| "loss": 2.0697, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.4325595063179548, |
| "grad_norm": 0.0492679663002491, |
| "learning_rate": 6.269890690495593e-05, |
| "loss": 2.1064, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.4560681751395828, |
| "grad_norm": 0.051078781485557556, |
| "learning_rate": 6.137321141083468e-05, |
| "loss": 2.0533, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.4795768439612107, |
| "grad_norm": 0.11715124547481537, |
| "learning_rate": 6.003902880013416e-05, |
| "loss": 2.1156, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.5030855127828386, |
| "grad_norm": 0.041975561529397964, |
| "learning_rate": 5.869735468991854e-05, |
| "loss": 2.0543, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.5265941816044668, |
| "grad_norm": 0.10740803927183151, |
| "learning_rate": 5.73491902876897e-05, |
| "loss": 2.0539, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.5501028504260947, |
| "grad_norm": 0.07230595499277115, |
| "learning_rate": 5.599554164424859e-05, |
| "loss": 2.1078, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.5736115192477227, |
| "grad_norm": 0.010545202530920506, |
| "learning_rate": 5.463741890294257e-05, |
| "loss": 2.1434, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.5971201880693506, |
| "grad_norm": 0.0882532000541687, |
| "learning_rate": 5.327583554585842e-05, |
| "loss": 2.0696, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.6206288568909786, |
| "grad_norm": 0.15730935335159302, |
| "learning_rate": 5.191180763752418e-05, |
| "loss": 2.0743, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.6441375257126065, |
| "grad_norm": 0.16760171949863434, |
| "learning_rate": 5.054635306668355e-05, |
| "loss": 2.1303, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.6676461945342345, |
| "grad_norm": 0.12746253609657288, |
| "learning_rate": 4.9180490786709335e-05, |
| "loss": 2.1359, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.6911548633558624, |
| "grad_norm": 0.08576823770999908, |
| "learning_rate": 4.781524005522216e-05, |
| "loss": 2.0884, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.7146635321774903, |
| "grad_norm": 0.01966356858611107, |
| "learning_rate": 4.645161967348223e-05, |
| "loss": 2.1102, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.7381722009991183, |
| "grad_norm": 0.1140172928571701, |
| "learning_rate": 4.509064722612169e-05, |
| "loss": 2.134, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.7616808698207465, |
| "grad_norm": 0.11263474822044373, |
| "learning_rate": 4.373333832178478e-05, |
| "loss": 2.212, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.7851895386423744, |
| "grad_norm": 0.01955031044781208, |
| "learning_rate": 4.2380705835242716e-05, |
| "loss": 2.0876, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.8086982074640023, |
| "grad_norm": 0.02556225284934044, |
| "learning_rate": 4.10337591515484e-05, |
| "loss": 2.064, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.8322068762856303, |
| "grad_norm": 0.04094263166189194, |
| "learning_rate": 3.969350341279566e-05, |
| "loss": 2.1206, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.8557155451072584, |
| "grad_norm": 0.0680101215839386, |
| "learning_rate": 3.8360938768044405e-05, |
| "loss": 2.0975, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.8792242139288864, |
| "grad_norm": 0.08410263061523438, |
| "learning_rate": 3.7037059626971944e-05, |
| "loss": 2.0519, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.9027328827505143, |
| "grad_norm": 0.02342092990875244, |
| "learning_rate": 3.572285391780734e-05, |
| "loss": 2.019, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.9262415515721423, |
| "grad_norm": 0.02327684499323368, |
| "learning_rate": 3.441930235010211e-05, |
| "loss": 2.0966, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.9497502203937702, |
| "grad_norm": 0.0357813723385334, |
| "learning_rate": 3.312737768288827e-05, |
| "loss": 2.0924, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.9732588892153982, |
| "grad_norm": 0.07186655700206757, |
| "learning_rate": 3.184804399876886e-05, |
| "loss": 2.0315, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.996767558037026, |
| "grad_norm": 0.09240284562110901, |
| "learning_rate": 3.0582255984483535e-05, |
| "loss": 2.0409, |
| "step": 850 |
| }, |
| { |
| "epoch": 2.0188069350573024, |
| "grad_norm": 0.04462951049208641, |
| "learning_rate": 2.9330958218485392e-05, |
| "loss": 1.9696, |
| "step": 860 |
| }, |
| { |
| "epoch": 2.0423156038789303, |
| "grad_norm": 0.029340475797653198, |
| "learning_rate": 2.8095084466061367e-05, |
| "loss": 2.1055, |
| "step": 870 |
| }, |
| { |
| "epoch": 2.0658242727005582, |
| "grad_norm": 0.048022590577602386, |
| "learning_rate": 2.6875556982521248e-05, |
| "loss": 2.0865, |
| "step": 880 |
| }, |
| { |
| "epoch": 2.089332941522186, |
| "grad_norm": 0.04670249670743942, |
| "learning_rate": 2.5673285824976575e-05, |
| "loss": 2.1081, |
| "step": 890 |
| }, |
| { |
| "epoch": 2.112841610343814, |
| "grad_norm": 0.015516964718699455, |
| "learning_rate": 2.4489168173221567e-05, |
| "loss": 2.1023, |
| "step": 900 |
| }, |
| { |
| "epoch": 2.136350279165442, |
| "grad_norm": 0.06762902438640594, |
| "learning_rate": 2.33240876602242e-05, |
| "loss": 2.0929, |
| "step": 910 |
| }, |
| { |
| "epoch": 2.15985894798707, |
| "grad_norm": 0.014990041963756084, |
| "learning_rate": 2.2178913712726045e-05, |
| "loss": 2.0817, |
| "step": 920 |
| }, |
| { |
| "epoch": 2.1833676168086984, |
| "grad_norm": 0.07376622408628464, |
| "learning_rate": 2.1054500902443423e-05, |
| "loss": 2.0868, |
| "step": 930 |
| }, |
| { |
| "epoch": 2.2068762856303263, |
| "grad_norm": 0.03299334645271301, |
| "learning_rate": 1.9951688308354133e-05, |
| "loss": 2.1123, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.2303849544519543, |
| "grad_norm": 0.05409041792154312, |
| "learning_rate": 1.887129889054503e-05, |
| "loss": 2.051, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.2538936232735822, |
| "grad_norm": 0.041158534586429596, |
| "learning_rate": 1.7814138876088566e-05, |
| "loss": 2.0458, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.27740229209521, |
| "grad_norm": 0.03300878778100014, |
| "learning_rate": 1.6780997157405846e-05, |
| "loss": 2.0565, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.300910960916838, |
| "grad_norm": 0.009902500547468662, |
| "learning_rate": 1.5772644703565565e-05, |
| "loss": 2.0952, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.324419629738466, |
| "grad_norm": 0.44653964042663574, |
| "learning_rate": 1.4789833984957962e-05, |
| "loss": 2.0386, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.347928298560094, |
| "grad_norm": 0.4100548326969147, |
| "learning_rate": 1.3833298411773276e-05, |
| "loss": 2.0977, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.371436967381722, |
| "grad_norm": 0.00745609262958169, |
| "learning_rate": 1.2903751786703482e-05, |
| "loss": 2.0503, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.39494563620335, |
| "grad_norm": 0.05212165042757988, |
| "learning_rate": 1.2001887772276076e-05, |
| "loss": 2.1437, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.418454305024978, |
| "grad_norm": 0.0367308035492897, |
| "learning_rate": 1.1128379373217058e-05, |
| "loss": 2.0911, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.4419629738466058, |
| "grad_norm": 0.02135683037340641, |
| "learning_rate": 1.028387843422952e-05, |
| "loss": 2.1183, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.465471642668234, |
| "grad_norm": 0.02447247877717018, |
| "learning_rate": 9.469015153562855e-06, |
| "loss": 2.1205, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.488980311489862, |
| "grad_norm": 0.011414138600230217, |
| "learning_rate": 8.684397612735024e-06, |
| "loss": 2.0027, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.51248898031149, |
| "grad_norm": 0.0028556822799146175, |
| "learning_rate": 7.930611322759462e-06, |
| "loss": 2.0682, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.535997649133118, |
| "grad_norm": 0.008712991140782833, |
| "learning_rate": 7.2082187872147935e-06, |
| "loss": 2.0895, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.559506317954746, |
| "grad_norm": 0.03148069605231285, |
| "learning_rate": 6.517759082483543e-06, |
| "loss": 2.1298, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.583014986776374, |
| "grad_norm": 0.03679489344358444, |
| "learning_rate": 5.8597474554731745e-06, |
| "loss": 2.0837, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.606523655598002, |
| "grad_norm": 0.05236874148249626, |
| "learning_rate": 5.234674939119538e-06, |
| "loss": 2.1486, |
| "step": 1110 |
| }, |
| { |
| "epoch": 2.6300323244196298, |
| "grad_norm": 0.03369889780879021, |
| "learning_rate": 4.643007985959641e-06, |
| "loss": 2.0959, |
| "step": 1120 |
| }, |
| { |
| "epoch": 2.6535409932412577, |
| "grad_norm": 0.006195698864758015, |
| "learning_rate": 4.085188120047362e-06, |
| "loss": 2.0439, |
| "step": 1130 |
| }, |
| { |
| "epoch": 2.6770496620628856, |
| "grad_norm": 0.03900829330086708, |
| "learning_rate": 3.5616316074715995e-06, |
| "loss": 2.111, |
| "step": 1140 |
| }, |
| { |
| "epoch": 2.7005583308845136, |
| "grad_norm": 0.00790059007704258, |
| "learning_rate": 3.0727291457229303e-06, |
| "loss": 2.1078, |
| "step": 1150 |
| }, |
| { |
| "epoch": 2.7240669997061415, |
| "grad_norm": 0.017095139250159264, |
| "learning_rate": 2.618845572140527e-06, |
| "loss": 2.1248, |
| "step": 1160 |
| }, |
| { |
| "epoch": 2.7475756685277695, |
| "grad_norm": 0.006935155484825373, |
| "learning_rate": 2.2003195916567855e-06, |
| "loss": 2.0573, |
| "step": 1170 |
| }, |
| { |
| "epoch": 2.7710843373493974, |
| "grad_norm": 0.01475539617240429, |
| "learning_rate": 1.8174635240431282e-06, |
| "loss": 2.0218, |
| "step": 1180 |
| }, |
| { |
| "epoch": 2.7945930061710254, |
| "grad_norm": 0.08390510082244873, |
| "learning_rate": 1.4705630708451712e-06, |
| "loss": 2.0729, |
| "step": 1190 |
| }, |
| { |
| "epoch": 2.8181016749926533, |
| "grad_norm": 0.04823232442140579, |
| "learning_rate": 1.1598771021816058e-06, |
| "loss": 2.0432, |
| "step": 1200 |
| }, |
| { |
| "epoch": 2.8416103438142817, |
| "grad_norm": 0.017384668812155724, |
| "learning_rate": 8.856374635655695e-07, |
| "loss": 2.1005, |
| "step": 1210 |
| }, |
| { |
| "epoch": 2.8651190126359096, |
| "grad_norm": 0.016273437067866325, |
| "learning_rate": 6.480488028928422e-07, |
| "loss": 2.1043, |
| "step": 1220 |
| }, |
| { |
| "epoch": 2.8886276814575376, |
| "grad_norm": 0.004406425170600414, |
| "learning_rate": 4.47288417725944e-07, |
| "loss": 2.1214, |
| "step": 1230 |
| }, |
| { |
| "epoch": 2.9121363502791655, |
| "grad_norm": 0.04330357536673546, |
| "learning_rate": 2.8350612298801427e-07, |
| "loss": 2.0679, |
| "step": 1240 |
| }, |
| { |
| "epoch": 2.9356450191007935, |
| "grad_norm": 0.005912340711802244, |
| "learning_rate": 1.5682413916531802e-07, |
| "loss": 2.1937, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.9591536879224214, |
| "grad_norm": 0.021025869995355606, |
| "learning_rate": 6.733700110174157e-08, |
| "loss": 2.1239, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.9826623567440493, |
| "grad_norm": 0.006763559300452471, |
| "learning_rate": 1.511148745335089e-08, |
| "loss": 2.1434, |
| "step": 1270 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 1278, |
| "total_flos": 2.0454733263908045e+17, |
| "train_loss": 5.729273645344288, |
| "train_runtime": 5128.0315, |
| "train_samples_per_second": 1.991, |
| "train_steps_per_second": 0.249 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 1278, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.0454733263908045e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |