| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 6.666666666666667, |
| "eval_steps": 500, |
| "global_step": 70, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.09523809523809523, |
| "grad_norm": 6.518212470378846, |
| "learning_rate": 2.8571428571428573e-06, |
| "loss": 1.0484, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.19047619047619047, |
| "grad_norm": 6.587528529000099, |
| "learning_rate": 5.7142857142857145e-06, |
| "loss": 1.0477, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.2857142857142857, |
| "grad_norm": 5.9824742944345095, |
| "learning_rate": 8.571428571428571e-06, |
| "loss": 1.0239, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.38095238095238093, |
| "grad_norm": 2.8247619928281877, |
| "learning_rate": 1.1428571428571429e-05, |
| "loss": 0.9707, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.47619047619047616, |
| "grad_norm": 4.148158334165783, |
| "learning_rate": 1.4285714285714287e-05, |
| "loss": 0.9516, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.5714285714285714, |
| "grad_norm": 4.381779840589696, |
| "learning_rate": 1.7142857142857142e-05, |
| "loss": 0.9588, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.6666666666666666, |
| "grad_norm": 4.140751972726175, |
| "learning_rate": 2e-05, |
| "loss": 0.9207, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.7619047619047619, |
| "grad_norm": 2.954091299952174, |
| "learning_rate": 1.9987569212189224e-05, |
| "loss": 0.8828, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.8571428571428571, |
| "grad_norm": 2.351514314592787, |
| "learning_rate": 1.9950307753654016e-05, |
| "loss": 0.8362, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.9523809523809523, |
| "grad_norm": 1.5794144544087108, |
| "learning_rate": 1.9888308262251286e-05, |
| "loss": 0.7998, |
| "step": 10 |
| }, |
| { |
| "epoch": 1.0476190476190477, |
| "grad_norm": 2.0629065112208305, |
| "learning_rate": 1.9801724878485438e-05, |
| "loss": 1.2229, |
| "step": 11 |
| }, |
| { |
| "epoch": 1.1428571428571428, |
| "grad_norm": 1.2860786592265527, |
| "learning_rate": 1.969077286229078e-05, |
| "loss": 0.7605, |
| "step": 12 |
| }, |
| { |
| "epoch": 1.2380952380952381, |
| "grad_norm": 1.1910602453723949, |
| "learning_rate": 1.955572805786141e-05, |
| "loss": 0.7905, |
| "step": 13 |
| }, |
| { |
| "epoch": 1.3333333333333333, |
| "grad_norm": 1.0953254404761612, |
| "learning_rate": 1.9396926207859085e-05, |
| "loss": 0.695, |
| "step": 14 |
| }, |
| { |
| "epoch": 1.4285714285714286, |
| "grad_norm": 1.1578797823598195, |
| "learning_rate": 1.921476211870408e-05, |
| "loss": 0.7683, |
| "step": 15 |
| }, |
| { |
| "epoch": 1.5238095238095237, |
| "grad_norm": 1.1760841836308957, |
| "learning_rate": 1.900968867902419e-05, |
| "loss": 0.762, |
| "step": 16 |
| }, |
| { |
| "epoch": 1.619047619047619, |
| "grad_norm": 0.8714825773690721, |
| "learning_rate": 1.8782215733702286e-05, |
| "loss": 0.6573, |
| "step": 17 |
| }, |
| { |
| "epoch": 1.7142857142857144, |
| "grad_norm": 0.94873606383788, |
| "learning_rate": 1.8532908816321557e-05, |
| "loss": 0.7097, |
| "step": 18 |
| }, |
| { |
| "epoch": 1.8095238095238095, |
| "grad_norm": 0.8951886609154117, |
| "learning_rate": 1.826238774315995e-05, |
| "loss": 0.6851, |
| "step": 19 |
| }, |
| { |
| "epoch": 1.9047619047619047, |
| "grad_norm": 0.7867579521377509, |
| "learning_rate": 1.7971325072229227e-05, |
| "loss": 0.7499, |
| "step": 20 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 1.1475048381969282, |
| "learning_rate": 1.766044443118978e-05, |
| "loss": 1.0128, |
| "step": 21 |
| }, |
| { |
| "epoch": 2.0952380952380953, |
| "grad_norm": 0.7348961313820965, |
| "learning_rate": 1.7330518718298263e-05, |
| "loss": 0.6568, |
| "step": 22 |
| }, |
| { |
| "epoch": 2.1904761904761907, |
| "grad_norm": 0.7131868615442395, |
| "learning_rate": 1.698236818086073e-05, |
| "loss": 0.6609, |
| "step": 23 |
| }, |
| { |
| "epoch": 2.2857142857142856, |
| "grad_norm": 0.760537957157173, |
| "learning_rate": 1.6616858375968596e-05, |
| "loss": 0.6579, |
| "step": 24 |
| }, |
| { |
| "epoch": 2.380952380952381, |
| "grad_norm": 0.5972522498908271, |
| "learning_rate": 1.6234898018587336e-05, |
| "loss": 0.6283, |
| "step": 25 |
| }, |
| { |
| "epoch": 2.4761904761904763, |
| "grad_norm": 0.7786495477065015, |
| "learning_rate": 1.5837436722347902e-05, |
| "loss": 0.6227, |
| "step": 26 |
| }, |
| { |
| "epoch": 2.571428571428571, |
| "grad_norm": 0.725947883704514, |
| "learning_rate": 1.5425462638657597e-05, |
| "loss": 0.6369, |
| "step": 27 |
| }, |
| { |
| "epoch": 2.6666666666666665, |
| "grad_norm": 0.704663124546637, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 0.623, |
| "step": 28 |
| }, |
| { |
| "epoch": 2.761904761904762, |
| "grad_norm": 0.7884749156155324, |
| "learning_rate": 1.4562106573531632e-05, |
| "loss": 0.6153, |
| "step": 29 |
| }, |
| { |
| "epoch": 2.857142857142857, |
| "grad_norm": 0.6890730354353558, |
| "learning_rate": 1.4112871031306118e-05, |
| "loss": 0.6066, |
| "step": 30 |
| }, |
| { |
| "epoch": 2.9523809523809526, |
| "grad_norm": 0.605161555100844, |
| "learning_rate": 1.3653410243663953e-05, |
| "loss": 0.6193, |
| "step": 31 |
| }, |
| { |
| "epoch": 3.0476190476190474, |
| "grad_norm": 0.962860319895067, |
| "learning_rate": 1.3184866502516846e-05, |
| "loss": 0.8645, |
| "step": 32 |
| }, |
| { |
| "epoch": 3.142857142857143, |
| "grad_norm": 0.5738476612999261, |
| "learning_rate": 1.2708404681430054e-05, |
| "loss": 0.6039, |
| "step": 33 |
| }, |
| { |
| "epoch": 3.238095238095238, |
| "grad_norm": 0.5840064742243171, |
| "learning_rate": 1.2225209339563144e-05, |
| "loss": 0.5792, |
| "step": 34 |
| }, |
| { |
| "epoch": 3.3333333333333335, |
| "grad_norm": 0.48734724202059726, |
| "learning_rate": 1.1736481776669307e-05, |
| "loss": 0.5745, |
| "step": 35 |
| }, |
| { |
| "epoch": 3.4285714285714284, |
| "grad_norm": 0.5042921073151893, |
| "learning_rate": 1.1243437046474854e-05, |
| "loss": 0.5501, |
| "step": 36 |
| }, |
| { |
| "epoch": 3.5238095238095237, |
| "grad_norm": 0.5116385124189319, |
| "learning_rate": 1.0747300935864245e-05, |
| "loss": 0.5759, |
| "step": 37 |
| }, |
| { |
| "epoch": 3.619047619047619, |
| "grad_norm": 0.49856827886397076, |
| "learning_rate": 1.0249306917380731e-05, |
| "loss": 0.5844, |
| "step": 38 |
| }, |
| { |
| "epoch": 3.7142857142857144, |
| "grad_norm": 0.41385053944230626, |
| "learning_rate": 9.750693082619274e-06, |
| "loss": 0.5163, |
| "step": 39 |
| }, |
| { |
| "epoch": 3.8095238095238093, |
| "grad_norm": 0.502976399008024, |
| "learning_rate": 9.252699064135759e-06, |
| "loss": 0.5796, |
| "step": 40 |
| }, |
| { |
| "epoch": 3.9047619047619047, |
| "grad_norm": 0.471416846154755, |
| "learning_rate": 8.756562953525151e-06, |
| "loss": 0.5052, |
| "step": 41 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.7552055603191463, |
| "learning_rate": 8.263518223330698e-06, |
| "loss": 0.8603, |
| "step": 42 |
| }, |
| { |
| "epoch": 4.095238095238095, |
| "grad_norm": 0.490918321678198, |
| "learning_rate": 7.774790660436857e-06, |
| "loss": 0.5393, |
| "step": 43 |
| }, |
| { |
| "epoch": 4.190476190476191, |
| "grad_norm": 0.5965615889176096, |
| "learning_rate": 7.291595318569951e-06, |
| "loss": 0.5203, |
| "step": 44 |
| }, |
| { |
| "epoch": 4.285714285714286, |
| "grad_norm": 0.4643882480068538, |
| "learning_rate": 6.815133497483157e-06, |
| "loss": 0.5165, |
| "step": 45 |
| }, |
| { |
| "epoch": 4.380952380952381, |
| "grad_norm": 0.45058368032549867, |
| "learning_rate": 6.34658975633605e-06, |
| "loss": 0.5066, |
| "step": 46 |
| }, |
| { |
| "epoch": 4.476190476190476, |
| "grad_norm": 0.4816892118134155, |
| "learning_rate": 5.887128968693887e-06, |
| "loss": 0.5163, |
| "step": 47 |
| }, |
| { |
| "epoch": 4.571428571428571, |
| "grad_norm": 0.44971728833802255, |
| "learning_rate": 5.43789342646837e-06, |
| "loss": 0.5095, |
| "step": 48 |
| }, |
| { |
| "epoch": 4.666666666666667, |
| "grad_norm": 0.48469532379433633, |
| "learning_rate": 5.000000000000003e-06, |
| "loss": 0.5093, |
| "step": 49 |
| }, |
| { |
| "epoch": 4.761904761904762, |
| "grad_norm": 0.3704729631438316, |
| "learning_rate": 4.5745373613424075e-06, |
| "loss": 0.5121, |
| "step": 50 |
| }, |
| { |
| "epoch": 4.857142857142857, |
| "grad_norm": 0.3570173560386269, |
| "learning_rate": 4.162563277652104e-06, |
| "loss": 0.5097, |
| "step": 51 |
| }, |
| { |
| "epoch": 4.9523809523809526, |
| "grad_norm": 0.44002402557950926, |
| "learning_rate": 3.7651019814126656e-06, |
| "loss": 0.5074, |
| "step": 52 |
| }, |
| { |
| "epoch": 5.0476190476190474, |
| "grad_norm": 0.6807163852906846, |
| "learning_rate": 3.3831416240314085e-06, |
| "loss": 0.7554, |
| "step": 53 |
| }, |
| { |
| "epoch": 5.142857142857143, |
| "grad_norm": 0.3074241890120183, |
| "learning_rate": 3.017631819139273e-06, |
| "loss": 0.4943, |
| "step": 54 |
| }, |
| { |
| "epoch": 5.238095238095238, |
| "grad_norm": 0.3753271169190582, |
| "learning_rate": 2.669481281701739e-06, |
| "loss": 0.5083, |
| "step": 55 |
| }, |
| { |
| "epoch": 5.333333333333333, |
| "grad_norm": 0.3775287344690129, |
| "learning_rate": 2.339555568810221e-06, |
| "loss": 0.4687, |
| "step": 56 |
| }, |
| { |
| "epoch": 5.428571428571429, |
| "grad_norm": 0.3444811821839712, |
| "learning_rate": 2.0286749277707783e-06, |
| "loss": 0.4545, |
| "step": 57 |
| }, |
| { |
| "epoch": 5.523809523809524, |
| "grad_norm": 0.3376918642935278, |
| "learning_rate": 1.7376122568400533e-06, |
| "loss": 0.5258, |
| "step": 58 |
| }, |
| { |
| "epoch": 5.619047619047619, |
| "grad_norm": 0.32848729600847976, |
| "learning_rate": 1.467091183678444e-06, |
| "loss": 0.493, |
| "step": 59 |
| }, |
| { |
| "epoch": 5.714285714285714, |
| "grad_norm": 0.34075492376610994, |
| "learning_rate": 1.2177842662977136e-06, |
| "loss": 0.4899, |
| "step": 60 |
| }, |
| { |
| "epoch": 5.809523809523809, |
| "grad_norm": 0.3020199394879285, |
| "learning_rate": 9.903113209758098e-07, |
| "loss": 0.4378, |
| "step": 61 |
| }, |
| { |
| "epoch": 5.904761904761905, |
| "grad_norm": 0.3167555828910423, |
| "learning_rate": 7.852378812959227e-07, |
| "loss": 0.4995, |
| "step": 62 |
| }, |
| { |
| "epoch": 6.0, |
| "grad_norm": 0.47472248835572706, |
| "learning_rate": 6.030737921409169e-07, |
| "loss": 0.7396, |
| "step": 63 |
| }, |
| { |
| "epoch": 6.095238095238095, |
| "grad_norm": 0.2685001460523142, |
| "learning_rate": 4.4427194213859216e-07, |
| "loss": 0.4855, |
| "step": 64 |
| }, |
| { |
| "epoch": 6.190476190476191, |
| "grad_norm": 0.2688660442454094, |
| "learning_rate": 3.0922713770922155e-07, |
| "loss": 0.477, |
| "step": 65 |
| }, |
| { |
| "epoch": 6.285714285714286, |
| "grad_norm": 0.26691359090490074, |
| "learning_rate": 1.9827512151456175e-07, |
| "loss": 0.4782, |
| "step": 66 |
| }, |
| { |
| "epoch": 6.380952380952381, |
| "grad_norm": 0.28163683547084395, |
| "learning_rate": 1.1169173774871478e-07, |
| "loss": 0.4759, |
| "step": 67 |
| }, |
| { |
| "epoch": 6.476190476190476, |
| "grad_norm": 0.2833035558570076, |
| "learning_rate": 4.9692246345985905e-08, |
| "loss": 0.4613, |
| "step": 68 |
| }, |
| { |
| "epoch": 6.571428571428571, |
| "grad_norm": 0.25566377284374026, |
| "learning_rate": 1.2430787810776556e-08, |
| "loss": 0.4822, |
| "step": 69 |
| }, |
| { |
| "epoch": 6.666666666666667, |
| "grad_norm": 0.28218666179330326, |
| "learning_rate": 0.0, |
| "loss": 0.4768, |
| "step": 70 |
| }, |
| { |
| "epoch": 6.666666666666667, |
| "step": 70, |
| "total_flos": 1.90120829386752e+17, |
| "train_loss": 0.6532361681972231, |
| "train_runtime": 3137.1757, |
| "train_samples_per_second": 2.231, |
| "train_steps_per_second": 0.022 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 70, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 7, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.90120829386752e+17, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |