{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 3.0,
"eval_steps": 500,
"global_step": 1290,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.023255813953488372,
"grad_norm": 2.1102249816437895,
"learning_rate": 2.0930232558139536e-06,
"loss": 0.4502,
"step": 10
},
{
"epoch": 0.046511627906976744,
"grad_norm": 0.6087936521314388,
"learning_rate": 4.418604651162791e-06,
"loss": 0.3222,
"step": 20
},
{
"epoch": 0.06976744186046512,
"grad_norm": 0.5276269917240805,
"learning_rate": 6.744186046511628e-06,
"loss": 0.2413,
"step": 30
},
{
"epoch": 0.09302325581395349,
"grad_norm": 0.33927814937972933,
"learning_rate": 9.069767441860465e-06,
"loss": 0.2011,
"step": 40
},
{
"epoch": 0.11627906976744186,
"grad_norm": 0.41749972892158266,
"learning_rate": 1.1395348837209304e-05,
"loss": 0.1824,
"step": 50
},
{
"epoch": 0.13953488372093023,
"grad_norm": 0.3467307268335015,
"learning_rate": 1.3720930232558139e-05,
"loss": 0.1777,
"step": 60
},
{
"epoch": 0.16279069767441862,
"grad_norm": 0.2909147698009027,
"learning_rate": 1.6046511627906977e-05,
"loss": 0.1641,
"step": 70
},
{
"epoch": 0.18604651162790697,
"grad_norm": 0.6246257892448243,
"learning_rate": 1.8372093023255815e-05,
"loss": 0.1515,
"step": 80
},
{
"epoch": 0.20930232558139536,
"grad_norm": 0.35908006141784865,
"learning_rate": 2.069767441860465e-05,
"loss": 0.1491,
"step": 90
},
{
"epoch": 0.23255813953488372,
"grad_norm": 0.27603388668684564,
"learning_rate": 2.302325581395349e-05,
"loss": 0.1481,
"step": 100
},
{
"epoch": 0.2558139534883721,
"grad_norm": 0.2527784608738212,
"learning_rate": 2.5348837209302324e-05,
"loss": 0.1397,
"step": 110
},
{
"epoch": 0.27906976744186046,
"grad_norm": 0.29932144986900905,
"learning_rate": 2.7674418604651162e-05,
"loss": 0.1404,
"step": 120
},
{
"epoch": 0.3023255813953488,
"grad_norm": 0.3393202145121213,
"learning_rate": 3e-05,
"loss": 0.142,
"step": 130
},
{
"epoch": 0.32558139534883723,
"grad_norm": 0.24298726842558038,
"learning_rate": 2.999450876449861e-05,
"loss": 0.1353,
"step": 140
},
{
"epoch": 0.3488372093023256,
"grad_norm": 0.17700673786533003,
"learning_rate": 2.9978039078483434e-05,
"loss": 0.1316,
"step": 150
},
{
"epoch": 0.37209302325581395,
"grad_norm": 0.23624869484478003,
"learning_rate": 2.9950603000477722e-05,
"loss": 0.1296,
"step": 160
},
{
"epoch": 0.3953488372093023,
"grad_norm": 0.2450597575472511,
"learning_rate": 2.991222061821024e-05,
"loss": 0.1322,
"step": 170
},
{
"epoch": 0.4186046511627907,
"grad_norm": 0.2629976539114997,
"learning_rate": 2.9862920033907654e-05,
"loss": 0.1272,
"step": 180
},
{
"epoch": 0.4418604651162791,
"grad_norm": 0.158909112774518,
"learning_rate": 2.980273734371914e-05,
"loss": 0.1265,
"step": 190
},
{
"epoch": 0.46511627906976744,
"grad_norm": 0.16963032221578891,
"learning_rate": 2.9731716611288024e-05,
"loss": 0.1275,
"step": 200
},
{
"epoch": 0.4883720930232558,
"grad_norm": 0.207022304132148,
"learning_rate": 2.9649909835489935e-05,
"loss": 0.1262,
"step": 210
},
{
"epoch": 0.5116279069767442,
"grad_norm": 0.18726792852194557,
"learning_rate": 2.955737691236108e-05,
"loss": 0.1198,
"step": 220
},
{
"epoch": 0.5348837209302325,
"grad_norm": 0.1701049540713391,
"learning_rate": 2.945418559124446e-05,
"loss": 0.1209,
"step": 230
},
{
"epoch": 0.5581395348837209,
"grad_norm": 0.19958209659130646,
"learning_rate": 2.9340411425186207e-05,
"loss": 0.122,
"step": 240
},
{
"epoch": 0.5813953488372093,
"grad_norm": 0.22288049794980888,
"learning_rate": 2.921613771561829e-05,
"loss": 0.1155,
"step": 250
},
{
"epoch": 0.6046511627906976,
"grad_norm": 0.17056830281402427,
"learning_rate": 2.908145545136816e-05,
"loss": 0.1123,
"step": 260
},
{
"epoch": 0.627906976744186,
"grad_norm": 0.1726845936656448,
"learning_rate": 2.8936463242039924e-05,
"loss": 0.1139,
"step": 270
},
{
"epoch": 0.6511627906976745,
"grad_norm": 0.17997669987525944,
"learning_rate": 2.8781267245815898e-05,
"loss": 0.1085,
"step": 280
},
{
"epoch": 0.6744186046511628,
"grad_norm": 0.19695001871413667,
"learning_rate": 2.8615981091731296e-05,
"loss": 0.113,
"step": 290
},
{
"epoch": 0.6976744186046512,
"grad_norm": 0.19008651260372486,
"learning_rate": 2.8440725796479075e-05,
"loss": 0.1172,
"step": 300
},
{
"epoch": 0.7209302325581395,
"grad_norm": 0.18432725372928307,
"learning_rate": 2.825562967580579e-05,
"loss": 0.113,
"step": 310
},
{
"epoch": 0.7441860465116279,
"grad_norm": 0.14972827867736163,
"learning_rate": 2.8060828250563294e-05,
"loss": 0.1113,
"step": 320
},
{
"epoch": 0.7674418604651163,
"grad_norm": 0.1708006253644624,
"learning_rate": 2.7856464147485202e-05,
"loss": 0.1118,
"step": 330
},
{
"epoch": 0.7906976744186046,
"grad_norm": 0.2545153275078911,
"learning_rate": 2.764268699476058e-05,
"loss": 0.1126,
"step": 340
},
{
"epoch": 0.813953488372093,
"grad_norm": 0.13582442076029375,
"learning_rate": 2.7419653312481482e-05,
"loss": 0.1084,
"step": 350
},
{
"epoch": 0.8372093023255814,
"grad_norm": 0.1885021789487428,
"learning_rate": 2.7187526398044463e-05,
"loss": 0.1177,
"step": 360
},
{
"epoch": 0.8604651162790697,
"grad_norm": 0.18897101004109837,
"learning_rate": 2.6946476206589972e-05,
"loss": 0.1044,
"step": 370
},
{
"epoch": 0.8837209302325582,
"grad_norm": 0.12704249962452174,
"learning_rate": 2.6696679226567202e-05,
"loss": 0.1099,
"step": 380
},
{
"epoch": 0.9069767441860465,
"grad_norm": 0.19659069921161837,
"learning_rate": 2.6438318350515467e-05,
"loss": 0.1084,
"step": 390
},
{
"epoch": 0.9302325581395349,
"grad_norm": 0.17360269942962656,
"learning_rate": 2.617158274115673e-05,
"loss": 0.1086,
"step": 400
},
{
"epoch": 0.9534883720930233,
"grad_norm": 0.15978057253464667,
"learning_rate": 2.5896667692897334e-05,
"loss": 0.098,
"step": 410
},
{
"epoch": 0.9767441860465116,
"grad_norm": 0.12225147895714812,
"learning_rate": 2.5613774488840333e-05,
"loss": 0.1083,
"step": 420
},
{
"epoch": 1.0,
"grad_norm": 0.11136831453413676,
"learning_rate": 2.532311025341309e-05,
"loss": 0.1064,
"step": 430
},
{
"epoch": 1.0232558139534884,
"grad_norm": 0.11500268275827884,
"learning_rate": 2.502488780071807e-05,
"loss": 0.1053,
"step": 440
},
{
"epoch": 1.0465116279069768,
"grad_norm": 0.13683341482554096,
"learning_rate": 2.4719325478717893e-05,
"loss": 0.103,
"step": 450
},
{
"epoch": 1.069767441860465,
"grad_norm": 0.1391528532587655,
"learning_rate": 2.440664700936861e-05,
"loss": 0.102,
"step": 460
},
{
"epoch": 1.0930232558139534,
"grad_norm": 0.1591189829381333,
"learning_rate": 2.408708132481842e-05,
"loss": 0.1017,
"step": 470
},
{
"epoch": 1.1162790697674418,
"grad_norm": 0.10083202954080314,
"learning_rate": 2.376086239979158e-05,
"loss": 0.1047,
"step": 480
},
{
"epoch": 1.1395348837209303,
"grad_norm": 0.2036430998599195,
"learning_rate": 2.3428229080280407e-05,
"loss": 0.1018,
"step": 490
},
{
"epoch": 1.1627906976744187,
"grad_norm": 0.15460019789887855,
"learning_rate": 2.3089424908670642e-05,
"loss": 0.1006,
"step": 500
},
{
"epoch": 1.1860465116279069,
"grad_norm": 0.1171523300082374,
"learning_rate": 2.2744697945428307e-05,
"loss": 0.1007,
"step": 510
},
{
"epoch": 1.2093023255813953,
"grad_norm": 0.19738915842381824,
"learning_rate": 2.2394300587478566e-05,
"loss": 0.0972,
"step": 520
},
{
"epoch": 1.2325581395348837,
"grad_norm": 0.16778096884891686,
"learning_rate": 2.2038489383409652e-05,
"loss": 0.1027,
"step": 530
},
{
"epoch": 1.255813953488372,
"grad_norm": 0.13067156491026763,
"learning_rate": 2.167752484563696e-05,
"loss": 0.0997,
"step": 540
},
{
"epoch": 1.2790697674418605,
"grad_norm": 0.16129671511714355,
"learning_rate": 2.13116712596651e-05,
"loss": 0.1025,
"step": 550
},
{
"epoch": 1.302325581395349,
"grad_norm": 0.15785271361590572,
"learning_rate": 2.0941196490587352e-05,
"loss": 0.1002,
"step": 560
},
{
"epoch": 1.3255813953488373,
"grad_norm": 0.13841764309679408,
"learning_rate": 2.05663717869643e-05,
"loss": 0.0994,
"step": 570
},
{
"epoch": 1.3488372093023255,
"grad_norm": 0.17509131465232344,
"learning_rate": 2.0187471582225173e-05,
"loss": 0.0993,
"step": 580
},
{
"epoch": 1.372093023255814,
"grad_norm": 0.1221451212745489,
"learning_rate": 1.9804773293737416e-05,
"loss": 0.0998,
"step": 590
},
{
"epoch": 1.3953488372093024,
"grad_norm": 0.14688306712971377,
"learning_rate": 1.9418557119691434e-05,
"loss": 0.0988,
"step": 600
},
{
"epoch": 1.4186046511627908,
"grad_norm": 0.10658674461240546,
"learning_rate": 1.902910583394938e-05,
"loss": 0.0994,
"step": 610
},
{
"epoch": 1.441860465116279,
"grad_norm": 0.11935538384825148,
"learning_rate": 1.8636704579008096e-05,
"loss": 0.0985,
"step": 620
},
{
"epoch": 1.4651162790697674,
"grad_norm": 0.12721218184322167,
"learning_rate": 1.824164065722783e-05,
"loss": 0.0994,
"step": 630
},
{
"epoch": 1.4883720930232558,
"grad_norm": 0.09137892917036859,
"learning_rate": 1.7844203320479614e-05,
"loss": 0.1047,
"step": 640
},
{
"epoch": 1.5116279069767442,
"grad_norm": 0.23254378397439393,
"learning_rate": 1.7444683558365182e-05,
"loss": 0.0959,
"step": 650
},
{
"epoch": 1.5348837209302326,
"grad_norm": 0.12936296282790546,
"learning_rate": 1.7043373885164703e-05,
"loss": 0.0968,
"step": 660
},
{
"epoch": 1.558139534883721,
"grad_norm": 0.15526942233091734,
"learning_rate": 1.664056812566812e-05,
"loss": 0.1004,
"step": 670
},
{
"epoch": 1.5813953488372094,
"grad_norm": 0.12963665517042525,
"learning_rate": 1.623656120004698e-05,
"loss": 0.0921,
"step": 680
},
{
"epoch": 1.6046511627906976,
"grad_norm": 0.11191785293454326,
"learning_rate": 1.5831648907924337e-05,
"loss": 0.0969,
"step": 690
},
{
"epoch": 1.627906976744186,
"grad_norm": 0.13773389105774028,
"learning_rate": 1.5426127711800636e-05,
"loss": 0.0994,
"step": 700
},
{
"epoch": 1.6511627906976745,
"grad_norm": 0.10576569528021193,
"learning_rate": 1.5020294519994381e-05,
"loss": 0.0957,
"step": 710
},
{
"epoch": 1.6744186046511627,
"grad_norm": 0.1437216064703046,
"learning_rate": 1.4614446469256305e-05,
"loss": 0.0945,
"step": 720
},
{
"epoch": 1.697674418604651,
"grad_norm": 0.10803257779082216,
"learning_rate": 1.4208880707216323e-05,
"loss": 0.095,
"step": 730
},
{
"epoch": 1.7209302325581395,
"grad_norm": 0.12123283799800175,
"learning_rate": 1.3803894174822518e-05,
"loss": 0.0944,
"step": 740
},
{
"epoch": 1.744186046511628,
"grad_norm": 0.100414996108917,
"learning_rate": 1.3399783388931468e-05,
"loss": 0.0954,
"step": 750
},
{
"epoch": 1.7674418604651163,
"grad_norm": 0.1369011425168964,
"learning_rate": 1.2996844225209033e-05,
"loss": 0.0984,
"step": 760
},
{
"epoch": 1.7906976744186047,
"grad_norm": 0.1359084312900855,
"learning_rate": 1.2595371701500639e-05,
"loss": 0.0962,
"step": 770
},
{
"epoch": 1.8139534883720931,
"grad_norm": 0.13125777727884955,
"learning_rate": 1.219565976182963e-05,
"loss": 0.0972,
"step": 780
},
{
"epoch": 1.8372093023255816,
"grad_norm": 0.13341223023541443,
"learning_rate": 1.1798001061181799e-05,
"loss": 0.0952,
"step": 790
},
{
"epoch": 1.8604651162790697,
"grad_norm": 0.08587984083521312,
"learning_rate": 1.1402686751233723e-05,
"loss": 0.0969,
"step": 800
},
{
"epoch": 1.8837209302325582,
"grad_norm": 0.11630108959701958,
"learning_rate": 1.101000626718182e-05,
"loss": 0.0915,
"step": 810
},
{
"epoch": 1.9069767441860463,
"grad_norm": 0.0951067714002941,
"learning_rate": 1.0620247115828044e-05,
"loss": 0.0966,
"step": 820
},
{
"epoch": 1.9302325581395348,
"grad_norm": 0.10789836511240755,
"learning_rate": 1.0233694665077584e-05,
"loss": 0.0946,
"step": 830
},
{
"epoch": 1.9534883720930232,
"grad_norm": 0.10824454265797709,
"learning_rate": 9.850631935002531e-06,
"loss": 0.0956,
"step": 840
},
{
"epoch": 1.9767441860465116,
"grad_norm": 0.10283011375161531,
"learning_rate": 9.471339390624574e-06,
"loss": 0.0949,
"step": 850
},
{
"epoch": 2.0,
"grad_norm": 0.15809185521039087,
"learning_rate": 9.0960947365684e-06,
"loss": 0.0934,
"step": 860
},
{
"epoch": 2.0232558139534884,
"grad_norm": 0.15384352891044772,
"learning_rate": 8.725172713736136e-06,
"loss": 0.0889,
"step": 870
},
{
"epoch": 2.046511627906977,
"grad_norm": 0.11467341711361749,
"learning_rate": 8.358844898151791e-06,
"loss": 0.0882,
"step": 880
},
{
"epoch": 2.0697674418604652,
"grad_norm": 0.10965841387662369,
"learning_rate": 7.997379502122849e-06,
"loss": 0.0944,
"step": 890
},
{
"epoch": 2.0930232558139537,
"grad_norm": 0.1024892604780862,
"learning_rate": 7.641041177864661e-06,
"loss": 0.0892,
"step": 900
},
{
"epoch": 2.116279069767442,
"grad_norm": 0.14766897087136052,
"learning_rate": 7.290090823731452e-06,
"loss": 0.0904,
"step": 910
},
{
"epoch": 2.13953488372093,
"grad_norm": 0.11551061066180639,
"learning_rate": 6.944785393195742e-06,
"loss": 0.0933,
"step": 920
},
{
"epoch": 2.1627906976744184,
"grad_norm": 0.1307266761701476,
"learning_rate": 6.605377706716049e-06,
"loss": 0.0866,
"step": 930
},
{
"epoch": 2.186046511627907,
"grad_norm": 0.10681432279549767,
"learning_rate": 6.27211626663071e-06,
"loss": 0.0869,
"step": 940
},
{
"epoch": 2.2093023255813953,
"grad_norm": 0.1471123103304021,
"learning_rate": 5.945245075213187e-06,
"loss": 0.0858,
"step": 950
},
{
"epoch": 2.2325581395348837,
"grad_norm": 0.1518492318767705,
"learning_rate": 5.625003456022247e-06,
"loss": 0.0854,
"step": 960
},
{
"epoch": 2.255813953488372,
"grad_norm": 0.12598219156122908,
"learning_rate": 5.311625878677658e-06,
"loss": 0.0853,
"step": 970
},
{
"epoch": 2.2790697674418605,
"grad_norm": 0.15991189489983731,
"learning_rate": 5.005341787189832e-06,
"loss": 0.0914,
"step": 980
},
{
"epoch": 2.302325581395349,
"grad_norm": 0.10711098788498662,
"learning_rate": 4.706375431968998e-06,
"loss": 0.0833,
"step": 990
},
{
"epoch": 2.3255813953488373,
"grad_norm": 0.15238996613139555,
"learning_rate": 4.414945705636949e-06,
"loss": 0.088,
"step": 1000
},
{
"epoch": 2.3488372093023258,
"grad_norm": 0.13302573278619106,
"learning_rate": 4.131265982761614e-06,
"loss": 0.0872,
"step": 1010
},
{
"epoch": 2.3720930232558137,
"grad_norm": 0.09653650821534612,
"learning_rate": 3.855543963631685e-06,
"loss": 0.0895,
"step": 1020
},
{
"epoch": 2.395348837209302,
"grad_norm": 0.12648429239194942,
"learning_rate": 3.587981522185829e-06,
"loss": 0.0854,
"step": 1030
},
{
"epoch": 2.4186046511627906,
"grad_norm": 0.1545950486769342,
"learning_rate": 3.328774558207692e-06,
"loss": 0.0909,
"step": 1040
},
{
"epoch": 2.441860465116279,
"grad_norm": 0.10549590353114048,
"learning_rate": 3.0781128538949714e-06,
"loss": 0.0878,
"step": 1050
},
{
"epoch": 2.4651162790697674,
"grad_norm": 0.18177777322706803,
"learning_rate": 2.8361799349076143e-06,
"loss": 0.0852,
"step": 1060
},
{
"epoch": 2.488372093023256,
"grad_norm": 0.11868713958151146,
"learning_rate": 2.6031529359967833e-06,
"loss": 0.0891,
"step": 1070
},
{
"epoch": 2.511627906976744,
"grad_norm": 0.1284488259520864,
"learning_rate": 2.3792024713130284e-06,
"loss": 0.089,
"step": 1080
},
{
"epoch": 2.5348837209302326,
"grad_norm": 0.14486344360735834,
"learning_rate": 2.164492509488657e-06,
"loss": 0.0803,
"step": 1090
},
{
"epoch": 2.558139534883721,
"grad_norm": 0.12768774247632075,
"learning_rate": 1.9591802535856433e-06,
"loss": 0.0852,
"step": 1100
},
{
"epoch": 2.5813953488372094,
"grad_norm": 0.12486343748182972,
"learning_rate": 1.763416025997126e-06,
"loss": 0.0854,
"step": 1110
},
{
"epoch": 2.604651162790698,
"grad_norm": 0.1325469478337174,
"learning_rate": 1.5773431583866227e-06,
"loss": 0.09,
"step": 1120
},
{
"epoch": 2.6279069767441863,
"grad_norm": 0.1483837067276731,
"learning_rate": 1.4010978867456664e-06,
"loss": 0.0864,
"step": 1130
},
{
"epoch": 2.6511627906976747,
"grad_norm": 0.10867631647084727,
"learning_rate": 1.2348092516466032e-06,
"loss": 0.0859,
"step": 1140
},
{
"epoch": 2.6744186046511627,
"grad_norm": 0.10578223359112342,
"learning_rate": 1.0785990037636335e-06,
"loss": 0.0852,
"step": 1150
},
{
"epoch": 2.697674418604651,
"grad_norm": 0.11026052474123807,
"learning_rate": 9.325815147312739e-07,
"loss": 0.0858,
"step": 1160
},
{
"epoch": 2.7209302325581395,
"grad_norm": 0.1433636551446035,
"learning_rate": 7.968636934054741e-07,
"loss": 0.0833,
"step": 1170
},
{
"epoch": 2.744186046511628,
"grad_norm": 0.12573674370411025,
"learning_rate": 6.71544907588712e-07,
"loss": 0.0783,
"step": 1180
},
{
"epoch": 2.7674418604651163,
"grad_norm": 0.13603777866417854,
"learning_rate": 5.567169112764109e-07,
"loss": 0.0856,
"step": 1190
},
{
"epoch": 2.7906976744186047,
"grad_norm": 0.12748468973009236,
"learning_rate": 4.524637774778984e-07,
"loss": 0.0832,
"step": 1200
},
{
"epoch": 2.813953488372093,
"grad_norm": 0.10740073013695721,
"learning_rate": 3.588618366610941e-07,
"loss": 0.0859,
"step": 1210
},
{
"epoch": 2.8372093023255816,
"grad_norm": 0.16114245090981888,
"learning_rate": 2.7597962086605255e-07,
"loss": 0.0842,
"step": 1220
},
{
"epoch": 2.8604651162790695,
"grad_norm": 0.1597114067458733,
"learning_rate": 2.038778135282171e-07,
"loss": 0.0891,
"step": 1230
},
{
"epoch": 2.883720930232558,
"grad_norm": 0.15931507855411944,
"learning_rate": 1.4260920504814366e-07,
"loss": 0.086,
"step": 1240
},
{
"epoch": 2.9069767441860463,
"grad_norm": 0.15611356894003056,
"learning_rate": 9.221865414023201e-08,
"loss": 0.0808,
"step": 1250
},
{
"epoch": 2.9302325581395348,
"grad_norm": 0.12919745509778163,
"learning_rate": 5.2743054988758085e-08,
"loss": 0.0813,
"step": 1260
},
{
"epoch": 2.953488372093023,
"grad_norm": 0.11402034227218683,
"learning_rate": 2.4211310235258687e-08,
"loss": 0.0787,
"step": 1270
},
{
"epoch": 2.9767441860465116,
"grad_norm": 0.14204729077366124,
"learning_rate": 6.6443098170271276e-09,
"loss": 0.0846,
"step": 1280
},
{
"epoch": 3.0,
"grad_norm": 0.1646401717020459,
"learning_rate": 5.4915672239586045e-11,
"loss": 0.088,
"step": 1290
}
],
"logging_steps": 10,
"max_steps": 1290,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 173923848683520.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}