{
"best_global_step": null,
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.2,
"eval_steps": 500,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"grad_norm": 1.6891276836395264,
"learning_rate": 1.8e-06,
"loss": 1.2585,
"step": 10
},
{
"grad_norm": 1.2421960830688477,
"learning_rate": 3.8e-06,
"loss": 1.2386,
"step": 20
},
{
"grad_norm": 1.0935646295547485,
"learning_rate": 5.8e-06,
"loss": 1.1761,
"step": 30
},
{
"grad_norm": 0.7210215330123901,
"learning_rate": 7.8e-06,
"loss": 1.0877,
"step": 40
},
{
"grad_norm": 0.34306979179382324,
"learning_rate": 9.800000000000001e-06,
"loss": 1.0339,
"step": 50
},
{
"grad_norm": 0.18134735524654388,
"learning_rate": 1.18e-05,
"loss": 1.0275,
"step": 60
},
{
"grad_norm": 0.15449777245521545,
"learning_rate": 1.3800000000000002e-05,
"loss": 1.0225,
"step": 70
},
{
"grad_norm": 0.15307500958442688,
"learning_rate": 1.58e-05,
"loss": 1.0188,
"step": 80
},
{
"grad_norm": 0.16959135234355927,
"learning_rate": 1.78e-05,
"loss": 1.0164,
"step": 90
},
{
"grad_norm": 0.1843302696943283,
"learning_rate": 1.9800000000000004e-05,
"loss": 1.019,
"step": 100
},
{
"grad_norm": 0.5004823803901672,
"learning_rate": 2.18e-05,
"loss": 1.007,
"step": 110
},
{
"grad_norm": 0.3533937931060791,
"learning_rate": 2.38e-05,
"loss": 0.9837,
"step": 120
},
{
"grad_norm": 0.492814302444458,
"learning_rate": 2.58e-05,
"loss": 0.95,
"step": 130
},
{
"grad_norm": 0.6054441332817078,
"learning_rate": 2.7800000000000005e-05,
"loss": 0.9005,
"step": 140
},
{
"grad_norm": 0.6737467646598816,
"learning_rate": 2.98e-05,
"loss": 0.8446,
"step": 150
},
{
"grad_norm": 1.3609671592712402,
"learning_rate": 3.18e-05,
"loss": 0.7917,
"step": 160
},
{
"grad_norm": 0.7027902603149414,
"learning_rate": 3.38e-05,
"loss": 0.7423,
"step": 170
},
{
"grad_norm": 0.6924164891242981,
"learning_rate": 3.58e-05,
"loss": 0.6849,
"step": 180
},
{
"grad_norm": 0.7687128186225891,
"learning_rate": 3.7800000000000004e-05,
"loss": 0.6385,
"step": 190
},
{
"grad_norm": 1.3768059015274048,
"learning_rate": 3.9800000000000005e-05,
"loss": 0.5934,
"step": 200
},
{
"grad_norm": 0.7467365860939026,
"learning_rate": 4.18e-05,
"loss": 0.5587,
"step": 210
},
{
"grad_norm": 0.9139049649238586,
"learning_rate": 4.38e-05,
"loss": 0.5173,
"step": 220
},
{
"grad_norm": 1.048671841621399,
"learning_rate": 4.58e-05,
"loss": 0.4742,
"step": 230
},
{
"grad_norm": 1.2771224975585938,
"learning_rate": 4.78e-05,
"loss": 0.4459,
"step": 240
},
{
"grad_norm": 1.6271154880523682,
"learning_rate": 4.9800000000000004e-05,
"loss": 0.4083,
"step": 250
},
{
"grad_norm": 1.4045976400375366,
"learning_rate": 5.1800000000000005e-05,
"loss": 0.3832,
"step": 260
},
{
"grad_norm": 1.031724214553833,
"learning_rate": 5.380000000000001e-05,
"loss": 0.3571,
"step": 270
},
{
"grad_norm": 0.9413377046585083,
"learning_rate": 5.580000000000001e-05,
"loss": 0.3294,
"step": 280
},
{
"grad_norm": 0.990053653717041,
"learning_rate": 5.7799999999999995e-05,
"loss": 0.3072,
"step": 290
},
{
"grad_norm": 0.9019013047218323,
"learning_rate": 5.9800000000000003e-05,
"loss": 0.2993,
"step": 300
},
{
"grad_norm": 0.9619300365447998,
"learning_rate": 6.18e-05,
"loss": 0.2908,
"step": 310
},
{
"grad_norm": 1.0667186975479126,
"learning_rate": 6.38e-05,
"loss": 0.2681,
"step": 320
},
{
"grad_norm": 1.119122862815857,
"learning_rate": 6.58e-05,
"loss": 0.2608,
"step": 330
},
{
"grad_norm": 1.1136410236358643,
"learning_rate": 6.780000000000001e-05,
"loss": 0.2568,
"step": 340
},
{
"grad_norm": 1.5547524690628052,
"learning_rate": 6.98e-05,
"loss": 0.2301,
"step": 350
},
{
"grad_norm": 1.0856785774230957,
"learning_rate": 7.18e-05,
"loss": 0.2166,
"step": 360
},
{
"grad_norm": 0.9325472712516785,
"learning_rate": 7.38e-05,
"loss": 0.1965,
"step": 370
},
{
"grad_norm": 0.9351710081100464,
"learning_rate": 7.58e-05,
"loss": 0.1845,
"step": 380
},
{
"grad_norm": 1.274675726890564,
"learning_rate": 7.780000000000001e-05,
"loss": 0.1701,
"step": 390
},
{
"grad_norm": 1.1908860206604004,
"learning_rate": 7.98e-05,
"loss": 0.1534,
"step": 400
},
{
"grad_norm": 1.1736817359924316,
"learning_rate": 8.18e-05,
"loss": 0.1455,
"step": 410
},
{
"grad_norm": 1.7282778024673462,
"learning_rate": 8.38e-05,
"loss": 0.1299,
"step": 420
},
{
"grad_norm": 1.0969237089157104,
"learning_rate": 8.58e-05,
"loss": 0.1252,
"step": 430
},
{
"grad_norm": 1.1998040676116943,
"learning_rate": 8.78e-05,
"loss": 0.1132,
"step": 440
},
{
"grad_norm": 1.1225069761276245,
"learning_rate": 8.98e-05,
"loss": 0.1111,
"step": 450
},
{
"grad_norm": 1.034788727760315,
"learning_rate": 9.180000000000001e-05,
"loss": 0.104,
"step": 460
},
{
"grad_norm": 1.0320745706558228,
"learning_rate": 9.38e-05,
"loss": 0.0981,
"step": 470
},
{
"grad_norm": 0.8197027444839478,
"learning_rate": 9.58e-05,
"loss": 0.0934,
"step": 480
},
{
"grad_norm": 1.0704108476638794,
"learning_rate": 9.78e-05,
"loss": 0.0844,
"step": 490
},
{
"grad_norm": 1.1035730838775635,
"learning_rate": 9.98e-05,
"loss": 0.0815,
"step": 500
},
{
"grad_norm": 0.8952515721321106,
"learning_rate": 9.9999778549206e-05,
"loss": 0.0764,
"step": 510
},
{
"grad_norm": 1.274247407913208,
"learning_rate": 9.999901304280685e-05,
"loss": 0.0774,
"step": 520
},
{
"grad_norm": 0.9264320135116577,
"learning_rate": 9.999770075521164e-05,
"loss": 0.0783,
"step": 530
},
{
"grad_norm": 0.8261769413948059,
"learning_rate": 9.99958417007713e-05,
"loss": 0.0771,
"step": 540
},
{
"grad_norm": 0.9270550608634949,
"learning_rate": 9.999343589981615e-05,
"loss": 0.0661,
"step": 550
},
{
"grad_norm": 1.1616880893707275,
"learning_rate": 9.999048337865568e-05,
"loss": 0.0755,
"step": 560
},
{
"grad_norm": 0.9049257636070251,
"learning_rate": 9.998698416957815e-05,
"loss": 0.0765,
"step": 570
},
{
"grad_norm": 1.1724106073379517,
"learning_rate": 9.998293831085037e-05,
"loss": 0.0712,
"step": 580
},
{
"grad_norm": 0.8528197407722473,
"learning_rate": 9.997834584671719e-05,
"loss": 0.0684,
"step": 590
},
{
"grad_norm": 0.9542710781097412,
"learning_rate": 9.997320682740107e-05,
"loss": 0.0716,
"step": 600
},
{
"grad_norm": 0.7609046101570129,
"learning_rate": 9.996752130910149e-05,
"loss": 0.0672,
"step": 610
},
{
"grad_norm": 0.770867109298706,
"learning_rate": 9.99612893539944e-05,
"loss": 0.0657,
"step": 620
},
{
"grad_norm": 0.5650806427001953,
"learning_rate": 9.995451103023144e-05,
"loss": 0.064,
"step": 630
},
{
"grad_norm": 0.9630634784698486,
"learning_rate": 9.994718641193928e-05,
"loss": 0.0668,
"step": 640
},
{
"grad_norm": 0.7161832451820374,
"learning_rate": 9.993931557921874e-05,
"loss": 0.0659,
"step": 650
},
{
"grad_norm": 0.9892486333847046,
"learning_rate": 9.993089861814402e-05,
"loss": 0.0627,
"step": 660
},
{
"grad_norm": 0.7624261379241943,
"learning_rate": 9.992193562076166e-05,
"loss": 0.0597,
"step": 670
},
{
"grad_norm": 0.8515845537185669,
"learning_rate": 9.991242668508954e-05,
"loss": 0.0556,
"step": 680
},
{
"grad_norm": 0.9307328462600708,
"learning_rate": 9.990237191511587e-05,
"loss": 0.053,
"step": 690
},
{
"grad_norm": 1.0245169401168823,
"learning_rate": 9.989177142079802e-05,
"loss": 0.0567,
"step": 700
},
{
"grad_norm": 0.8914056420326233,
"learning_rate": 9.988062531806126e-05,
"loss": 0.0565,
"step": 710
},
{
"grad_norm": 0.9028451442718506,
"learning_rate": 9.986893372879762e-05,
"loss": 0.0575,
"step": 720
},
{
"grad_norm": 0.8873146772384644,
"learning_rate": 9.985669678086443e-05,
"loss": 0.0602,
"step": 730
},
{
"grad_norm": 0.7956727147102356,
"learning_rate": 9.984391460808298e-05,
"loss": 0.0602,
"step": 740
},
{
"grad_norm": 0.7061982154846191,
"learning_rate": 9.983058735023709e-05,
"loss": 0.0587,
"step": 750
},
{
"grad_norm": 0.8428799510002136,
"learning_rate": 9.98167151530715e-05,
"loss": 0.0552,
"step": 760
},
{
"grad_norm": 0.9017972946166992,
"learning_rate": 9.980229816829034e-05,
"loss": 0.0595,
"step": 770
},
{
"grad_norm": 0.8163244128227234,
"learning_rate": 9.978733655355544e-05,
"loss": 0.057,
"step": 780
},
{
"grad_norm": 0.7705178260803223,
"learning_rate": 9.977183047248464e-05,
"loss": 0.0512,
"step": 790
},
{
"grad_norm": 1.0152888298034668,
"learning_rate": 9.975578009464992e-05,
"loss": 0.0532,
"step": 800
},
{
"grad_norm": 0.7115610241889954,
"learning_rate": 9.97391855955757e-05,
"loss": 0.0475,
"step": 810
},
{
"grad_norm": 0.8815870881080627,
"learning_rate": 9.972204715673669e-05,
"loss": 0.0495,
"step": 820
},
{
"grad_norm": 0.6733144521713257,
"learning_rate": 9.970436496555617e-05,
"loss": 0.05,
"step": 830
},
{
"grad_norm": 0.8259405493736267,
"learning_rate": 9.968613921540373e-05,
"loss": 0.0519,
"step": 840
},
{
"grad_norm": 0.7688018679618835,
"learning_rate": 9.966737010559326e-05,
"loss": 0.0525,
"step": 850
},
{
"grad_norm": 0.6979959607124329,
"learning_rate": 9.964805784138072e-05,
"loss": 0.0511,
"step": 860
},
{
"grad_norm": 0.7118434309959412,
"learning_rate": 9.962820263396195e-05,
"loss": 0.0482,
"step": 870
},
{
"grad_norm": 0.8046584129333496,
"learning_rate": 9.960780470047033e-05,
"loss": 0.0522,
"step": 880
},
{
"grad_norm": 0.6603678464889526,
"learning_rate": 9.958686426397437e-05,
"loss": 0.0499,
"step": 890
},
{
"grad_norm": 0.7854413986206055,
"learning_rate": 9.956538155347534e-05,
"loss": 0.052,
"step": 900
},
{
"grad_norm": 0.5703814029693604,
"learning_rate": 9.95433568039047e-05,
"loss": 0.0455,
"step": 910
},
{
"grad_norm": 0.8428476452827454,
"learning_rate": 9.952079025612162e-05,
"loss": 0.0443,
"step": 920
},
{
"grad_norm": 0.7631692886352539,
"learning_rate": 9.949768215691022e-05,
"loss": 0.0468,
"step": 930
},
{
"grad_norm": 0.7209548950195312,
"learning_rate": 9.9474032758977e-05,
"loss": 0.0474,
"step": 940
},
{
"grad_norm": 0.6969509124755859,
"learning_rate": 9.944984232094794e-05,
"loss": 0.0501,
"step": 950
},
{
"grad_norm": 0.7608206272125244,
"learning_rate": 9.942511110736584e-05,
"loss": 0.0471,
"step": 960
},
{
"grad_norm": 0.7608780860900879,
"learning_rate": 9.939983938868726e-05,
"loss": 0.0476,
"step": 970
},
{
"grad_norm": 0.6730809807777405,
"learning_rate": 9.93740274412797e-05,
"loss": 0.0431,
"step": 980
},
{
"grad_norm": 0.8944201469421387,
"learning_rate": 9.934767554741846e-05,
"loss": 0.0511,
"step": 990
},
{
"grad_norm": 0.8130436539649963,
"learning_rate": 9.932078399528361e-05,
"loss": 0.0424,
"step": 1000
},
{
"grad_norm": 0.7339826822280884,
"learning_rate": 9.929335307895689e-05,
"loss": 0.043,
"step": 1010
},
{
"grad_norm": 0.7107013463973999,
"learning_rate": 9.926538309841839e-05,
"loss": 0.0498,
"step": 1020
},
{
"grad_norm": 0.7686504125595093,
"learning_rate": 9.923687435954334e-05,
"loss": 0.0417,
"step": 1030
},
{
"grad_norm": 0.5518864989280701,
"learning_rate": 9.920782717409873e-05,
"loss": 0.0397,
"step": 1040
},
{
"grad_norm": 0.4921113848686218,
"learning_rate": 9.917824185973994e-05,
"loss": 0.0386,
"step": 1050
},
{
"grad_norm": 0.5414538979530334,
"learning_rate": 9.914811874000723e-05,
"loss": 0.0383,
"step": 1060
},
{
"grad_norm": 0.7523898482322693,
"learning_rate": 9.911745814432218e-05,
"loss": 0.0422,
"step": 1070
},
{
"grad_norm": 0.5954861640930176,
"learning_rate": 9.90862604079842e-05,
"loss": 0.0442,
"step": 1080
},
{
"grad_norm": 0.7658764719963074,
"learning_rate": 9.90545258721667e-05,
"loss": 0.0417,
"step": 1090
},
{
"grad_norm": 0.6343804001808167,
"learning_rate": 9.90222548839135e-05,
"loss": 0.0401,
"step": 1100
},
{
"grad_norm": 0.4654010534286499,
"learning_rate": 9.898944779613495e-05,
"loss": 0.0372,
"step": 1110
},
{
"grad_norm": 0.7204039096832275,
"learning_rate": 9.89561049676041e-05,
"loss": 0.0399,
"step": 1120
},
{
"grad_norm": 0.6943351626396179,
"learning_rate": 9.89222267629528e-05,
"loss": 0.0425,
"step": 1130
},
{
"grad_norm": 0.7369424104690552,
"learning_rate": 9.888781355266763e-05,
"loss": 0.0418,
"step": 1140
},
{
"grad_norm": 0.6650173664093018,
"learning_rate": 9.885286571308598e-05,
"loss": 0.0382,
"step": 1150
},
{
"grad_norm": 0.5390568375587463,
"learning_rate": 9.881738362639182e-05,
"loss": 0.0415,
"step": 1160
},
{
"grad_norm": 0.8118464946746826,
"learning_rate": 9.878136768061154e-05,
"loss": 0.0432,
"step": 1170
},
{
"grad_norm": 0.7557201981544495,
"learning_rate": 9.874481826960979e-05,
"loss": 0.0424,
"step": 1180
},
{
"grad_norm": 0.6853317022323608,
"learning_rate": 9.870773579308503e-05,
"loss": 0.0398,
"step": 1190
},
{
"grad_norm": 0.6386889815330505,
"learning_rate": 9.867012065656533e-05,
"loss": 0.0427,
"step": 1200
},
{
"grad_norm": 0.706305980682373,
"learning_rate": 9.863197327140376e-05,
"loss": 0.0375,
"step": 1210
},
{
"grad_norm": 0.7585138082504272,
"learning_rate": 9.859329405477403e-05,
"loss": 0.038,
"step": 1220
},
{
"grad_norm": 0.6497421264648438,
"learning_rate": 9.855408342966585e-05,
"loss": 0.0397,
"step": 1230
},
{
"grad_norm": 0.5648456811904907,
"learning_rate": 9.851434182488033e-05,
"loss": 0.038,
"step": 1240
},
{
"grad_norm": 0.7392773628234863,
"learning_rate": 9.84740696750253e-05,
"loss": 0.0375,
"step": 1250
},
{
"grad_norm": 0.7451732754707336,
"learning_rate": 9.843326742051055e-05,
"loss": 0.0384,
"step": 1260
},
{
"grad_norm": 0.5808190703392029,
"learning_rate": 9.839193550754297e-05,
"loss": 0.0374,
"step": 1270
},
{
"grad_norm": 0.7281758785247803,
"learning_rate": 9.835007438812177e-05,
"loss": 0.0418,
"step": 1280
},
{
"grad_norm": 0.6007897257804871,
"learning_rate": 9.830768452003341e-05,
"loss": 0.0375,
"step": 1290
},
{
"grad_norm": 0.6821262836456299,
"learning_rate": 9.826476636684671e-05,
"loss": 0.0369,
"step": 1300
},
{
"grad_norm": 0.6722646951675415,
"learning_rate": 9.822132039790773e-05,
"loss": 0.0368,
"step": 1310
},
{
"grad_norm": 0.5557746887207031,
"learning_rate": 9.817734708833461e-05,
"loss": 0.0337,
"step": 1320
},
{
"grad_norm": 0.6717876195907593,
"learning_rate": 9.813284691901243e-05,
"loss": 0.0423,
"step": 1330
},
{
"grad_norm": 0.6293877363204956,
"learning_rate": 9.808782037658792e-05,
"loss": 0.0408,
"step": 1340
},
{
"grad_norm": 0.6186095476150513,
"learning_rate": 9.804226795346411e-05,
"loss": 0.0382,
"step": 1350
},
{
"grad_norm": 0.8207311630249023,
"learning_rate": 9.799619014779503e-05,
"loss": 0.0371,
"step": 1360
},
{
"grad_norm": 0.5756433010101318,
"learning_rate": 9.794958746348013e-05,
"loss": 0.0384,
"step": 1370
},
{
"grad_norm": 0.653166651725769,
"learning_rate": 9.790246041015896e-05,
"loss": 0.034,
"step": 1380
},
{
"grad_norm": 0.6604922413825989,
"learning_rate": 9.785480950320538e-05,
"loss": 0.0347,
"step": 1390
},
{
"grad_norm": 0.7043635249137878,
"learning_rate": 9.78066352637221e-05,
"loss": 0.0338,
"step": 1400
},
{
"grad_norm": 0.6509787440299988,
"learning_rate": 9.775793821853488e-05,
"loss": 0.0355,
"step": 1410
},
{
"grad_norm": 0.6967816948890686,
"learning_rate": 9.77087189001868e-05,
"loss": 0.0353,
"step": 1420
},
{
"grad_norm": 0.8638598322868347,
"learning_rate": 9.765897784693243e-05,
"loss": 0.0361,
"step": 1430
},
{
"grad_norm": 0.6340733170509338,
"learning_rate": 9.760871560273197e-05,
"loss": 0.0351,
"step": 1440
},
{
"grad_norm": 0.6075161695480347,
"learning_rate": 9.755793271724526e-05,
"loss": 0.0342,
"step": 1450
},
{
"grad_norm": 0.6330815553665161,
"learning_rate": 9.750662974582584e-05,
"loss": 0.0406,
"step": 1460
},
{
"grad_norm": 0.5282254815101624,
"learning_rate": 9.745480724951473e-05,
"loss": 0.0351,
"step": 1470
},
{
"grad_norm": 0.7070049047470093,
"learning_rate": 9.740246579503447e-05,
"loss": 0.0366,
"step": 1480
},
{
"grad_norm": 0.7055975198745728,
"learning_rate": 9.734960595478284e-05,
"loss": 0.0357,
"step": 1490
},
{
"grad_norm": 0.6384962797164917,
"learning_rate": 9.729622830682657e-05,
"loss": 0.0343,
"step": 1500
},
{
"grad_norm": 0.5874096751213074,
"learning_rate": 9.724233343489504e-05,
"loss": 0.0372,
"step": 1510
},
{
"grad_norm": 0.5690801739692688,
"learning_rate": 9.718792192837396e-05,
"loss": 0.0319,
"step": 1520
},
{
"grad_norm": 0.6367931962013245,
"learning_rate": 9.713299438229886e-05,
"loss": 0.037,
"step": 1530
},
{
"grad_norm": 0.5469662547111511,
"learning_rate": 9.707755139734855e-05,
"loss": 0.0365,
"step": 1540
},
{
"grad_norm": 0.556199848651886,
"learning_rate": 9.702159357983866e-05,
"loss": 0.0342,
"step": 1550
},
{
"grad_norm": 0.6330277919769287,
"learning_rate": 9.696512154171492e-05,
"loss": 0.0363,
"step": 1560
},
{
"grad_norm": 0.6156578660011292,
"learning_rate": 9.690813590054645e-05,
"loss": 0.036,
"step": 1570
},
{
"grad_norm": 0.633952260017395,
"learning_rate": 9.685063727951914e-05,
"loss": 0.0342,
"step": 1580
},
{
"grad_norm": 0.4720134735107422,
"learning_rate": 9.679262630742865e-05,
"loss": 0.0352,
"step": 1590
},
{
"grad_norm": 0.6747561097145081,
"learning_rate": 9.673410361867373e-05,
"loss": 0.0361,
"step": 1600
},
{
"grad_norm": 0.5455501675605774,
"learning_rate": 9.667506985324909e-05,
"loss": 0.0332,
"step": 1610
},
{
"grad_norm": 0.698010265827179,
"learning_rate": 9.661552565673855e-05,
"loss": 0.0309,
"step": 1620
},
{
"grad_norm": 0.6324073672294617,
"learning_rate": 9.655547168030789e-05,
"loss": 0.035,
"step": 1630
},
{
"grad_norm": 0.6724660992622375,
"learning_rate": 9.649490858069777e-05,
"loss": 0.0327,
"step": 1640
},
{
"grad_norm": 0.4986797273159027,
"learning_rate": 9.643383702021658e-05,
"loss": 0.032,
"step": 1650
},
{
"grad_norm": 0.6412114500999451,
"learning_rate": 9.637225766673307e-05,
"loss": 0.0309,
"step": 1660
},
{
"grad_norm": 0.5809900164604187,
"learning_rate": 9.631017119366922e-05,
"loss": 0.0319,
"step": 1670
},
{
"grad_norm": 0.5835633277893066,
"learning_rate": 9.624757827999273e-05,
"loss": 0.0339,
"step": 1680
},
{
"grad_norm": 0.5545461773872375,
"learning_rate": 9.618447961020971e-05,
"loss": 0.039,
"step": 1690
},
{
"grad_norm": 0.6023017168045044,
"learning_rate": 9.612087587435707e-05,
"loss": 0.038,
"step": 1700
},
{
"grad_norm": 0.6422150135040283,
"learning_rate": 9.605676776799508e-05,
"loss": 0.0342,
"step": 1710
},
{
"grad_norm": 0.6100661158561707,
"learning_rate": 9.599215599219973e-05,
"loss": 0.0332,
"step": 1720
},
{
"grad_norm": 0.6025664806365967,
"learning_rate": 9.592704125355505e-05,
"loss": 0.0332,
"step": 1730
},
{
"grad_norm": 0.5365665555000305,
"learning_rate": 9.586142426414538e-05,
"loss": 0.0322,
"step": 1740
},
{
"grad_norm": 0.6416350603103638,
"learning_rate": 9.57953057415476e-05,
"loss": 0.0356,
"step": 1750
},
{
"grad_norm": 0.5091562867164612,
"learning_rate": 9.572868640882328e-05,
"loss": 0.0327,
"step": 1760
},
{
"grad_norm": 0.506897509098053,
"learning_rate": 9.56615669945108e-05,
"loss": 0.0347,
"step": 1770
},
{
"grad_norm": 0.5713956356048584,
"learning_rate": 9.55939482326173e-05,
"loss": 0.0324,
"step": 1780
},
{
"grad_norm": 0.47305065393447876,
"learning_rate": 9.552583086261069e-05,
"loss": 0.034,
"step": 1790
},
{
"grad_norm": 0.5677897334098816,
"learning_rate": 9.545721562941168e-05,
"loss": 0.0319,
"step": 1800
},
{
"grad_norm": 0.5385820865631104,
"learning_rate": 9.538810328338543e-05,
"loss": 0.0306,
"step": 1810
},
{
"grad_norm": 0.67743319272995,
"learning_rate": 9.531849458033349e-05,
"loss": 0.0336,
"step": 1820
},
{
"grad_norm": 0.6273674368858337,
"learning_rate": 9.524839028148547e-05,
"loss": 0.0325,
"step": 1830
},
{
"grad_norm": 0.5729127526283264,
"learning_rate": 9.517779115349077e-05,
"loss": 0.033,
"step": 1840
},
{
"grad_norm": 0.5738729238510132,
"learning_rate": 9.510669796841014e-05,
"loss": 0.0327,
"step": 1850
},
{
"grad_norm": 0.46439313888549805,
"learning_rate": 9.503511150370727e-05,
"loss": 0.0349,
"step": 1860
},
{
"grad_norm": 0.49889275431632996,
"learning_rate": 9.496303254224024e-05,
"loss": 0.0329,
"step": 1870
},
{
"grad_norm": 0.466863751411438,
"learning_rate": 9.489046187225306e-05,
"loss": 0.032,
"step": 1880
},
{
"grad_norm": 0.49700266122817993,
"learning_rate": 9.481740028736692e-05,
"loss": 0.0335,
"step": 1890
},
{
"grad_norm": 0.6205595135688782,
"learning_rate": 9.474384858657164e-05,
"loss": 0.0315,
"step": 1900
},
{
"grad_norm": 0.5160883665084839,
"learning_rate": 9.466980757421679e-05,
"loss": 0.0313,
"step": 1910
},
{
"grad_norm": 0.5717476010322571,
"learning_rate": 9.459527806000305e-05,
"loss": 0.0333,
"step": 1920
},
{
"grad_norm": 0.5541268587112427,
"learning_rate": 9.452026085897325e-05,
"loss": 0.0327,
"step": 1930
},
{
"grad_norm": 0.5027980208396912,
"learning_rate": 9.444475679150348e-05,
"loss": 0.0314,
"step": 1940
},
{
"grad_norm": 0.5508278608322144,
"learning_rate": 9.436876668329411e-05,
"loss": 0.0303,
"step": 1950
},
{
"grad_norm": 0.5171688199043274,
"learning_rate": 9.429229136536079e-05,
"loss": 0.0298,
"step": 1960
},
{
"grad_norm": 0.6098824143409729,
"learning_rate": 9.421533167402534e-05,
"loss": 0.0314,
"step": 1970
},
{
"grad_norm": 0.5086256265640259,
"learning_rate": 9.413788845090666e-05,
"loss": 0.0317,
"step": 1980
},
{
"grad_norm": 0.6548255681991577,
"learning_rate": 9.405996254291136e-05,
"loss": 0.0311,
"step": 1990
},
{
"grad_norm": 0.6564981341362,
"learning_rate": 9.398155480222474e-05,
"loss": 0.0309,
"step": 2000
}
],
"logging_steps": 10,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 9223372036854775807,
"save_steps": 1000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 0.0,
"train_batch_size": 64,
"trial_name": null,
"trial_params": null
}