{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.0,
  "global_step": 2980,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.9993288590604027e-05,
      "loss": 2.418,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.9986577181208055e-05,
      "loss": 2.4023,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.997986577181208e-05,
      "loss": 2.3594,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.997315436241611e-05,
      "loss": 2.25,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9966442953020134e-05,
      "loss": 2.2402,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9959731543624163e-05,
      "loss": 2.2305,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9953020134228188e-05,
      "loss": 2.2227,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9946308724832216e-05,
      "loss": 2.1621,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.993959731543624e-05,
      "loss": 2.0977,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.993288590604027e-05,
      "loss": 2.1035,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.99261744966443e-05,
      "loss": 2.1094,
      "step": 11
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9919463087248324e-05,
      "loss": 2.0752,
      "step": 12
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9912751677852352e-05,
      "loss": 2.0488,
      "step": 13
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9906040268456378e-05,
      "loss": 2.0547,
      "step": 14
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9899328859060406e-05,
      "loss": 2.0107,
      "step": 15
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.989261744966443e-05,
      "loss": 2.0176,
      "step": 16
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.988590604026846e-05,
      "loss": 1.9883,
      "step": 17
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9879194630872485e-05,
      "loss": 1.9209,
      "step": 18
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.987248322147651e-05,
      "loss": 1.9951,
      "step": 19
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.986577181208054e-05,
      "loss": 1.9531,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9859060402684564e-05,
      "loss": 1.9229,
      "step": 21
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9852348993288592e-05,
      "loss": 1.9277,
      "step": 22
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9845637583892618e-05,
      "loss": 1.9395,
      "step": 23
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9838926174496646e-05,
      "loss": 1.916,
      "step": 24
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.983221476510067e-05,
      "loss": 1.9082,
      "step": 25
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.98255033557047e-05,
      "loss": 1.9268,
      "step": 26
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9818791946308725e-05,
      "loss": 1.9287,
      "step": 27
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9812080536912754e-05,
      "loss": 1.8809,
      "step": 28
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.980536912751678e-05,
      "loss": 1.915,
      "step": 29
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9798657718120807e-05,
      "loss": 1.9023,
      "step": 30
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9791946308724836e-05,
      "loss": 1.8613,
      "step": 31
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.978523489932886e-05,
      "loss": 1.9043,
      "step": 32
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.977852348993289e-05,
      "loss": 1.8428,
      "step": 33
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9771812080536915e-05,
      "loss": 1.8838,
      "step": 34
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9765100671140943e-05,
      "loss": 1.8564,
      "step": 35
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.975838926174497e-05,
      "loss": 1.8945,
      "step": 36
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9751677852348994e-05,
      "loss": 1.8555,
      "step": 37
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9744966442953022e-05,
      "loss": 1.8662,
      "step": 38
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9738255033557047e-05,
      "loss": 1.8867,
      "step": 39
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9731543624161076e-05,
      "loss": 1.8467,
      "step": 40
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.97248322147651e-05,
      "loss": 1.8311,
      "step": 41
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.971812080536913e-05,
      "loss": 1.8408,
      "step": 42
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9711409395973155e-05,
      "loss": 1.8594,
      "step": 43
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9704697986577183e-05,
      "loss": 1.8652,
      "step": 44
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.969798657718121e-05,
      "loss": 1.8018,
      "step": 45
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9691275167785237e-05,
      "loss": 1.8486,
      "step": 46
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9684563758389262e-05,
      "loss": 1.8818,
      "step": 47
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.967785234899329e-05,
      "loss": 1.8164,
      "step": 48
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9671140939597316e-05,
      "loss": 1.8047,
      "step": 49
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9664429530201344e-05,
      "loss": 1.8262,
      "step": 50
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9657718120805373e-05,
      "loss": 1.8223,
      "step": 51
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9651006711409398e-05,
      "loss": 1.8008,
      "step": 52
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9644295302013427e-05,
      "loss": 1.7949,
      "step": 53
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9637583892617452e-05,
      "loss": 1.7822,
      "step": 54
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.963087248322148e-05,
      "loss": 1.8193,
      "step": 55
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9624161073825505e-05,
      "loss": 1.7891,
      "step": 56
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.961744966442953e-05,
      "loss": 1.7617,
      "step": 57
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.961073825503356e-05,
      "loss": 1.792,
      "step": 58
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9604026845637584e-05,
      "loss": 1.792,
      "step": 59
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9597315436241613e-05,
      "loss": 1.8252,
      "step": 60
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9590604026845638e-05,
      "loss": 1.7949,
      "step": 61
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9583892617449667e-05,
      "loss": 1.7715,
      "step": 62
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.957718120805369e-05,
      "loss": 1.7822,
      "step": 63
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.957046979865772e-05,
      "loss": 1.8057,
      "step": 64
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9563758389261745e-05,
      "loss": 1.7852,
      "step": 65
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9557046979865774e-05,
      "loss": 1.7607,
      "step": 66
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.95503355704698e-05,
      "loss": 1.75,
      "step": 67
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9543624161073828e-05,
      "loss": 1.7188,
      "step": 68
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9536912751677856e-05,
      "loss": 1.7891,
      "step": 69
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.953020134228188e-05,
      "loss": 1.7734,
      "step": 70
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.952348993288591e-05,
      "loss": 1.7842,
      "step": 71
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9516778523489935e-05,
      "loss": 1.752,
      "step": 72
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9510067114093964e-05,
      "loss": 1.7422,
      "step": 73
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.950335570469799e-05,
      "loss": 1.748,
      "step": 74
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9496644295302014e-05,
      "loss": 1.7666,
      "step": 75
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9489932885906042e-05,
      "loss": 1.7305,
      "step": 76
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9483221476510068e-05,
      "loss": 1.7207,
      "step": 77
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9476510067114096e-05,
      "loss": 1.7725,
      "step": 78
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.946979865771812e-05,
      "loss": 1.7646,
      "step": 79
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.946308724832215e-05,
      "loss": 1.7256,
      "step": 80
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9456375838926175e-05,
      "loss": 1.7373,
      "step": 81
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9449664429530204e-05,
      "loss": 1.7402,
      "step": 82
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.944295302013423e-05,
      "loss": 1.7324,
      "step": 83
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9436241610738257e-05,
      "loss": 1.7256,
      "step": 84
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9429530201342282e-05,
      "loss": 1.7295,
      "step": 85
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.942281879194631e-05,
      "loss": 1.7295,
      "step": 86
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9416107382550336e-05,
      "loss": 1.7041,
      "step": 87
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9409395973154365e-05,
      "loss": 1.7354,
      "step": 88
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9402684563758393e-05,
      "loss": 1.7529,
      "step": 89
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.939597315436242e-05,
      "loss": 1.7324,
      "step": 90
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9389261744966447e-05,
      "loss": 1.6885,
      "step": 91
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9382550335570472e-05,
      "loss": 1.7402,
      "step": 92
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9375838926174497e-05,
      "loss": 1.7109,
      "step": 93
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9369127516778526e-05,
      "loss": 1.7598,
      "step": 94
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.936241610738255e-05,
      "loss": 1.7266,
      "step": 95
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.935570469798658e-05,
      "loss": 1.7236,
      "step": 96
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9348993288590605e-05,
      "loss": 1.7314,
      "step": 97
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9342281879194633e-05,
      "loss": 1.7148,
      "step": 98
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.933557046979866e-05,
      "loss": 1.7109,
      "step": 99
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9328859060402687e-05,
      "loss": 1.7148,
      "step": 100
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9322147651006712e-05,
      "loss": 1.7041,
      "step": 101
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9315436241610737e-05,
      "loss": 1.7422,
      "step": 102
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9308724832214766e-05,
      "loss": 1.7383,
      "step": 103
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9302013422818794e-05,
      "loss": 1.752,
      "step": 104
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.929530201342282e-05,
      "loss": 1.6973,
      "step": 105
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9288590604026848e-05,
      "loss": 1.7168,
      "step": 106
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9281879194630873e-05,
      "loss": 1.7256,
      "step": 107
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9275167785234902e-05,
      "loss": 1.7139,
      "step": 108
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.926845637583893e-05,
      "loss": 1.7207,
      "step": 109
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9261744966442955e-05,
      "loss": 1.7002,
      "step": 110
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.925503355704698e-05,
      "loss": 1.7012,
      "step": 111
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.924832214765101e-05,
      "loss": 1.7207,
      "step": 112
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9241610738255034e-05,
      "loss": 1.7217,
      "step": 113
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9234899328859063e-05,
      "loss": 1.7109,
      "step": 114
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9228187919463088e-05,
      "loss": 1.6758,
      "step": 115
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9221476510067117e-05,
      "loss": 1.6914,
      "step": 116
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9214765100671142e-05,
      "loss": 1.6953,
      "step": 117
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.920805369127517e-05,
      "loss": 1.6943,
      "step": 118
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9201342281879195e-05,
      "loss": 1.6768,
      "step": 119
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.919463087248322e-05,
      "loss": 1.6582,
      "step": 120
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.918791946308725e-05,
      "loss": 1.7012,
      "step": 121
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9181208053691274e-05,
      "loss": 1.666,
      "step": 122
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9174496644295303e-05,
      "loss": 1.6816,
      "step": 123
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.916778523489933e-05,
      "loss": 1.707,
      "step": 124
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9161073825503357e-05,
      "loss": 1.6396,
      "step": 125
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9154362416107385e-05,
      "loss": 1.6523,
      "step": 126
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.914765100671141e-05,
      "loss": 1.6865,
      "step": 127
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.914093959731544e-05,
      "loss": 1.667,
      "step": 128
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9134228187919464e-05,
      "loss": 1.7168,
      "step": 129
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9127516778523493e-05,
      "loss": 1.6641,
      "step": 130
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9120805369127518e-05,
      "loss": 1.6934,
      "step": 131
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9114093959731546e-05,
      "loss": 1.6934,
      "step": 132
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.910738255033557e-05,
      "loss": 1.668,
      "step": 133
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.91006711409396e-05,
      "loss": 1.6709,
      "step": 134
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9093959731543625e-05,
      "loss": 1.6826,
      "step": 135
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9087248322147654e-05,
      "loss": 1.6689,
      "step": 136
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.908053691275168e-05,
      "loss": 1.6836,
      "step": 137
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9073825503355704e-05,
      "loss": 1.6992,
      "step": 138
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9067114093959733e-05,
      "loss": 1.6572,
      "step": 139
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9060402684563758e-05,
      "loss": 1.6562,
      "step": 140
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9053691275167786e-05,
      "loss": 1.6729,
      "step": 141
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.904697986577181e-05,
      "loss": 1.6719,
      "step": 142
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.904026845637584e-05,
      "loss": 1.666,
      "step": 143
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.903355704697987e-05,
      "loss": 1.6963,
      "step": 144
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9026845637583894e-05,
      "loss": 1.6943,
      "step": 145
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9020134228187922e-05,
      "loss": 1.6455,
      "step": 146
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9013422818791947e-05,
      "loss": 1.6973,
      "step": 147
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9006711409395976e-05,
      "loss": 1.6689,
      "step": 148
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9e-05,
      "loss": 1.6504,
      "step": 149
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.899328859060403e-05,
      "loss": 1.6074,
      "step": 150
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.8986577181208055e-05,
      "loss": 1.6738,
      "step": 151
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8979865771812083e-05,
      "loss": 1.6318,
      "step": 152
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.897315436241611e-05,
      "loss": 1.6357,
      "step": 153
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8966442953020137e-05,
      "loss": 1.6846,
      "step": 154
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8959731543624162e-05,
      "loss": 1.6729,
      "step": 155
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.895302013422819e-05,
      "loss": 1.6553,
      "step": 156
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.8946308724832216e-05,
      "loss": 1.6387,
      "step": 157
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.893959731543624e-05,
      "loss": 1.6426,
      "step": 158
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.893288590604027e-05,
      "loss": 1.6436,
      "step": 159
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8926174496644295e-05,
      "loss": 1.6699,
      "step": 160
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8919463087248323e-05,
      "loss": 1.6113,
      "step": 161
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.891275167785235e-05,
      "loss": 1.6504,
      "step": 162
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.8906040268456377e-05,
      "loss": 1.6514,
      "step": 163
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8899328859060406e-05,
      "loss": 1.6719,
      "step": 164
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.889261744966443e-05,
      "loss": 1.6318,
      "step": 165
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.888590604026846e-05,
      "loss": 1.6514,
      "step": 166
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8879194630872484e-05,
      "loss": 1.6641,
      "step": 167
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8872483221476513e-05,
      "loss": 1.6582,
      "step": 168
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.8865771812080538e-05,
      "loss": 1.6553,
      "step": 169
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8859060402684567e-05,
      "loss": 1.6816,
      "step": 170
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8852348993288592e-05,
      "loss": 1.6309,
      "step": 171
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.884563758389262e-05,
      "loss": 1.6309,
      "step": 172
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8838926174496646e-05,
      "loss": 1.6299,
      "step": 173
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8832214765100674e-05,
      "loss": 1.6885,
      "step": 174
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.88255033557047e-05,
      "loss": 1.6416,
      "step": 175
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8818791946308724e-05,
      "loss": 1.6377,
      "step": 176
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8812080536912753e-05,
      "loss": 1.6279,
      "step": 177
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8805369127516778e-05,
      "loss": 1.6309,
      "step": 178
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8798657718120807e-05,
      "loss": 1.6562,
      "step": 179
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8791946308724832e-05,
      "loss": 1.6211,
      "step": 180
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.878523489932886e-05,
      "loss": 1.6279,
      "step": 181
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.877852348993289e-05,
      "loss": 1.6387,
      "step": 182
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8771812080536914e-05,
      "loss": 1.6162,
      "step": 183
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8765100671140943e-05,
      "loss": 1.6562,
      "step": 184
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8758389261744968e-05,
      "loss": 1.6309,
      "step": 185
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8751677852348996e-05,
      "loss": 1.6416,
      "step": 186
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.874496644295302e-05,
      "loss": 1.6475,
      "step": 187
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.873825503355705e-05,
      "loss": 1.6172,
      "step": 188
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8731543624161075e-05,
      "loss": 1.6035,
      "step": 189
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8724832214765104e-05,
      "loss": 1.6504,
      "step": 190
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.871812080536913e-05,
      "loss": 1.6055,
      "step": 191
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8711409395973157e-05,
      "loss": 1.6201,
      "step": 192
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8704697986577183e-05,
      "loss": 1.623,
      "step": 193
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8697986577181208e-05,
      "loss": 1.6113,
      "step": 194
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8691275167785236e-05,
      "loss": 1.6074,
      "step": 195
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.868456375838926e-05,
      "loss": 1.6191,
      "step": 196
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.867785234899329e-05,
      "loss": 1.6152,
      "step": 197
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8671140939597315e-05,
      "loss": 1.623,
      "step": 198
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8664429530201344e-05,
      "loss": 1.5898,
      "step": 199
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.865771812080537e-05,
      "loss": 1.6094,
      "step": 200
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8651006711409397e-05,
      "loss": 1.6035,
      "step": 201
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8644295302013426e-05,
      "loss": 1.6348,
      "step": 202
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.863758389261745e-05,
      "loss": 1.6211,
      "step": 203
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.863087248322148e-05,
      "loss": 1.623,
      "step": 204
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8624161073825505e-05,
      "loss": 1.6201,
      "step": 205
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8617449664429533e-05,
      "loss": 1.6133,
      "step": 206
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.861073825503356e-05,
      "loss": 1.6445,
      "step": 207
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8604026845637587e-05,
      "loss": 1.6045,
      "step": 208
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8597315436241612e-05,
      "loss": 1.6084,
      "step": 209
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.859060402684564e-05,
      "loss": 1.6133,
      "step": 210
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8583892617449666e-05,
      "loss": 1.627,
      "step": 211
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.857718120805369e-05,
      "loss": 1.6299,
      "step": 212
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.857046979865772e-05,
      "loss": 1.6172,
      "step": 213
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8563758389261745e-05,
      "loss": 1.6074,
      "step": 214
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8557046979865773e-05,
      "loss": 1.6094,
      "step": 215
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.85503355704698e-05,
      "loss": 1.5938,
      "step": 216
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8543624161073827e-05,
      "loss": 1.5898,
      "step": 217
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8536912751677852e-05,
      "loss": 1.6191,
      "step": 218
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.853020134228188e-05,
      "loss": 1.6123,
      "step": 219
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8523489932885906e-05,
      "loss": 1.6191,
      "step": 220
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8516778523489934e-05,
      "loss": 1.5947,
      "step": 221
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8510067114093963e-05,
      "loss": 1.6074,
      "step": 222
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8503355704697988e-05,
      "loss": 1.5938,
      "step": 223
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8496644295302017e-05,
      "loss": 1.6309,
      "step": 224
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8489932885906042e-05,
      "loss": 1.5938,
      "step": 225
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.848322147651007e-05,
      "loss": 1.6084,
      "step": 226
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8476510067114096e-05,
      "loss": 1.5771,
      "step": 227
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8469798657718124e-05,
      "loss": 1.6084,
      "step": 228
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.846308724832215e-05,
      "loss": 1.6152,
      "step": 229
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8456375838926174e-05,
      "loss": 1.5586,
      "step": 230
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8449664429530203e-05,
      "loss": 1.6191,
      "step": 231
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8442953020134228e-05,
      "loss": 1.5918,
      "step": 232
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8436241610738257e-05,
      "loss": 1.5742,
      "step": 233
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8429530201342282e-05,
      "loss": 1.6377,
      "step": 234
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.842281879194631e-05,
      "loss": 1.5625,
      "step": 235
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8416107382550336e-05,
      "loss": 1.5625,
      "step": 236
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8409395973154364e-05,
      "loss": 1.6221,
      "step": 237
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.840268456375839e-05,
      "loss": 1.6211,
      "step": 238
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8395973154362418e-05,
      "loss": 1.6406,
      "step": 239
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8389261744966443e-05,
      "loss": 1.6113,
      "step": 240
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.838255033557047e-05,
      "loss": 1.5693,
      "step": 241
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.83758389261745e-05,
      "loss": 1.6133,
      "step": 242
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8369127516778525e-05,
      "loss": 1.5957,
      "step": 243
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8362416107382554e-05,
      "loss": 1.5664,
      "step": 244
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.835570469798658e-05,
      "loss": 1.5762,
      "step": 245
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8348993288590607e-05,
      "loss": 1.6045,
      "step": 246
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8342281879194633e-05,
      "loss": 1.583,
      "step": 247
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.833557046979866e-05,
      "loss": 1.6094,
      "step": 248
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8328859060402686e-05,
      "loss": 1.5615,
      "step": 249
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.832214765100671e-05,
      "loss": 1.5488,
      "step": 250
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.831543624161074e-05,
      "loss": 1.585,
      "step": 251
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8308724832214765e-05,
      "loss": 1.6123,
      "step": 252
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.8302013422818794e-05,
      "loss": 1.5635,
      "step": 253
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.829530201342282e-05,
      "loss": 1.6162,
      "step": 254
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8288590604026847e-05,
      "loss": 1.5947,
      "step": 255
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8281879194630873e-05,
      "loss": 1.5879,
      "step": 256
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.82751677852349e-05,
      "loss": 1.5596,
      "step": 257
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8268456375838926e-05,
      "loss": 1.5723,
      "step": 258
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.8261744966442955e-05,
      "loss": 1.5781,
      "step": 259
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.825503355704698e-05,
      "loss": 1.5986,
      "step": 260
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.824832214765101e-05,
      "loss": 1.6143,
      "step": 261
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8241610738255037e-05,
      "loss": 1.5732,
      "step": 262
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8234899328859062e-05,
      "loss": 1.5947,
      "step": 263
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.822818791946309e-05,
      "loss": 1.543,
      "step": 264
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.8221476510067116e-05,
      "loss": 1.5898,
      "step": 265
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8214765100671145e-05,
      "loss": 1.5713,
      "step": 266
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.820805369127517e-05,
      "loss": 1.5654,
      "step": 267
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8201342281879195e-05,
      "loss": 1.6006,
      "step": 268
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8194630872483223e-05,
      "loss": 1.5898,
      "step": 269
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.818791946308725e-05,
      "loss": 1.5898,
      "step": 270
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8181208053691277e-05,
      "loss": 1.5596,
      "step": 271
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8174496644295302e-05,
      "loss": 1.5527,
      "step": 272
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.816778523489933e-05,
      "loss": 1.5957,
      "step": 273
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8161073825503356e-05,
      "loss": 1.5361,
      "step": 274
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8154362416107385e-05,
      "loss": 1.582,
      "step": 275
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.814765100671141e-05,
      "loss": 1.5859,
      "step": 276
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.8140939597315438e-05,
      "loss": 1.5869,
      "step": 277
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8134228187919463e-05,
      "loss": 1.5752,
      "step": 278
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8127516778523492e-05,
      "loss": 1.5791,
      "step": 279
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8120805369127517e-05,
      "loss": 1.5332,
      "step": 280
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8114093959731546e-05,
      "loss": 1.5488,
      "step": 281
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.8107382550335574e-05,
      "loss": 1.5518,
      "step": 282
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.81006711409396e-05,
      "loss": 1.5508,
      "step": 283
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8093959731543628e-05,
      "loss": 1.5801,
      "step": 284
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8087248322147653e-05,
      "loss": 1.5674,
      "step": 285
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8080536912751678e-05,
      "loss": 1.5693,
      "step": 286
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8073825503355707e-05,
      "loss": 1.583,
      "step": 287
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.8067114093959732e-05,
      "loss": 1.5605,
      "step": 288
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.806040268456376e-05,
      "loss": 1.5576,
      "step": 289
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8053691275167786e-05,
      "loss": 1.5791,
      "step": 290
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8046979865771814e-05,
      "loss": 1.5566,
      "step": 291
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.804026845637584e-05,
      "loss": 1.5762,
      "step": 292
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8033557046979868e-05,
      "loss": 1.5576,
      "step": 293
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.8026845637583893e-05,
      "loss": 1.6016,
      "step": 294
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.802013422818792e-05,
      "loss": 1.5186,
      "step": 295
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8013422818791947e-05,
      "loss": 1.5898,
      "step": 296
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8006711409395975e-05,
      "loss": 1.5869,
      "step": 297
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8e-05,
      "loss": 1.5303,
      "step": 298
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.799328859060403e-05,
      "loss": 1.5732,
      "step": 299
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7986577181208054e-05,
      "loss": 1.5566,
      "step": 300
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7979865771812083e-05,
      "loss": 1.5469,
      "step": 301
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.797315436241611e-05,
      "loss": 1.5459,
      "step": 302
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7966442953020136e-05,
      "loss": 1.5518,
      "step": 303
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.795973154362416e-05,
      "loss": 1.5879,
      "step": 304
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.795302013422819e-05,
      "loss": 1.5488,
      "step": 305
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7946308724832215e-05,
      "loss": 1.5508,
      "step": 306
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7939597315436244e-05,
      "loss": 1.5488,
      "step": 307
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.793288590604027e-05,
      "loss": 1.5547,
      "step": 308
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7926174496644298e-05,
      "loss": 1.5498,
      "step": 309
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7919463087248323e-05,
      "loss": 1.5605,
      "step": 310
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.791275167785235e-05,
      "loss": 1.5693,
      "step": 311
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.7906040268456376e-05,
      "loss": 1.5371,
      "step": 312
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.78993288590604e-05,
      "loss": 1.5762,
      "step": 313
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.789261744966443e-05,
      "loss": 1.5371,
      "step": 314
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.788590604026846e-05,
      "loss": 1.5596,
      "step": 315
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7879194630872484e-05,
      "loss": 1.5146,
      "step": 316
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7872483221476512e-05,
      "loss": 1.5674,
      "step": 317
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.7865771812080538e-05,
      "loss": 1.5654,
      "step": 318
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7859060402684566e-05,
      "loss": 1.5547,
      "step": 319
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7852348993288595e-05,
      "loss": 1.5361,
      "step": 320
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.784563758389262e-05,
      "loss": 1.5225,
      "step": 321
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7838926174496645e-05,
      "loss": 1.5225,
      "step": 322
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.7832214765100673e-05,
      "loss": 1.5645,
      "step": 323
    },
    {
      "epoch": 0.54,
      "learning_rate": 1.78255033557047e-05,
      "loss": 1.5615,
      "step": 324
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7818791946308727e-05,
      "loss": 1.5537,
      "step": 325
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7812080536912752e-05,
      "loss": 1.5391,
      "step": 326
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.780536912751678e-05,
      "loss": 1.5352,
      "step": 327
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7798657718120806e-05,
      "loss": 1.5713,
      "step": 328
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.7791946308724835e-05,
      "loss": 1.5293,
      "step": 329
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.778523489932886e-05,
      "loss": 1.5908,
      "step": 330
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7778523489932888e-05,
      "loss": 1.5752,
      "step": 331
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7771812080536913e-05,
      "loss": 1.5898,
      "step": 332
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.776510067114094e-05,
      "loss": 1.5244,
      "step": 333
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7758389261744967e-05,
      "loss": 1.5547,
      "step": 334
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.7751677852348996e-05,
      "loss": 1.5127,
      "step": 335
    },
    {
      "epoch": 0.56,
      "learning_rate": 1.774496644295302e-05,
      "loss": 1.5713,
      "step": 336
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.773825503355705e-05,
      "loss": 1.5254,
      "step": 337
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7731543624161075e-05,
      "loss": 1.5527,
      "step": 338
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7724832214765103e-05,
      "loss": 1.5752,
      "step": 339
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.771812080536913e-05,
      "loss": 1.5762,
      "step": 340
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7711409395973157e-05,
      "loss": 1.54,
      "step": 341
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.7704697986577182e-05,
      "loss": 1.5596,
      "step": 342
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.769798657718121e-05,
      "loss": 1.5293,
      "step": 343
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7691275167785236e-05,
      "loss": 1.542,
      "step": 344
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7684563758389264e-05,
      "loss": 1.5518,
      "step": 345
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.767785234899329e-05,
      "loss": 1.5537,
      "step": 346
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7671140939597318e-05,
      "loss": 1.5498,
      "step": 347
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.7664429530201343e-05,
      "loss": 1.5557,
      "step": 348
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.765771812080537e-05,
      "loss": 1.5742,
      "step": 349
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7651006711409397e-05,
      "loss": 1.5615,
      "step": 350
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7644295302013422e-05,
      "loss": 1.5479,
      "step": 351
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.763758389261745e-05,
      "loss": 1.5693,
      "step": 352
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7630872483221476e-05,
      "loss": 1.5508,
      "step": 353
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.7624161073825504e-05,
      "loss": 1.543,
      "step": 354
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7617449664429533e-05,
      "loss": 1.5479,
      "step": 355
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7610738255033558e-05,
      "loss": 1.499,
      "step": 356
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7604026845637586e-05,
      "loss": 1.5459,
      "step": 357
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.759731543624161e-05,
      "loss": 1.5107,
      "step": 358
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.759060402684564e-05,
      "loss": 1.5137,
      "step": 359
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.7583892617449665e-05,
      "loss": 1.5068,
      "step": 360
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7577181208053694e-05,
      "loss": 1.5156,
      "step": 361
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.757046979865772e-05,
      "loss": 1.5439,
      "step": 362
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7563758389261748e-05,
      "loss": 1.541,
      "step": 363
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7557046979865773e-05,
      "loss": 1.5576,
      "step": 364
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.75503355704698e-05,
      "loss": 1.5303,
      "step": 365
    },
    {
      "epoch": 0.61,
      "learning_rate": 1.7543624161073826e-05,
      "loss": 1.5195,
      "step": 366
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7536912751677855e-05,
      "loss": 1.5078,
      "step": 367
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.753020134228188e-05,
      "loss": 1.5244,
      "step": 368
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7523489932885905e-05,
      "loss": 1.5332,
      "step": 369
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7516778523489934e-05,
      "loss": 1.5,
      "step": 370
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.751006711409396e-05,
      "loss": 1.5293,
      "step": 371
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.7503355704697988e-05,
      "loss": 1.5342,
      "step": 372
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7496644295302013e-05,
      "loss": 1.5518,
      "step": 373
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.748993288590604e-05,
      "loss": 1.5322,
      "step": 374
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.748322147651007e-05,
      "loss": 1.5186,
      "step": 375
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7476510067114095e-05,
      "loss": 1.5693,
      "step": 376
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.7469798657718124e-05,
      "loss": 1.54,
      "step": 377
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.746308724832215e-05,
      "loss": 1.5264,
      "step": 378
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7456375838926177e-05,
      "loss": 1.5156,
      "step": 379
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7449664429530202e-05,
      "loss": 1.5303,
      "step": 380
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.744295302013423e-05,
      "loss": 1.5361,
      "step": 381
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7436241610738256e-05,
      "loss": 1.5371,
      "step": 382
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.7429530201342285e-05,
      "loss": 1.5693,
      "step": 383
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.742281879194631e-05,
      "loss": 1.5029,
      "step": 384
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.741610738255034e-05,
      "loss": 1.502,
      "step": 385
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7409395973154364e-05,
      "loss": 1.5195,
      "step": 386
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.740268456375839e-05,
      "loss": 1.5137,
      "step": 387
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7395973154362417e-05,
      "loss": 1.4873,
      "step": 388
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.7389261744966442e-05,
      "loss": 1.5283,
      "step": 389
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.738255033557047e-05,
      "loss": 1.5342,
      "step": 390
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7375838926174496e-05,
      "loss": 1.5342,
      "step": 391
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7369127516778525e-05,
      "loss": 1.5508,
      "step": 392
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.736241610738255e-05,
      "loss": 1.543,
      "step": 393
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.735570469798658e-05,
      "loss": 1.5322,
      "step": 394
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7348993288590607e-05,
      "loss": 1.5195,
      "step": 395
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7342281879194632e-05,
      "loss": 1.502,
      "step": 396
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.733557046979866e-05,
      "loss": 1.5439,
      "step": 397
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7328859060402686e-05,
      "loss": 1.5195,
      "step": 398
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7322147651006714e-05,
      "loss": 1.5273,
      "step": 399
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.731543624161074e-05,
      "loss": 1.5264,
      "step": 400
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7308724832214768e-05,
      "loss": 1.5322,
      "step": 401
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7302013422818793e-05,
      "loss": 1.5088,
      "step": 402
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7295302013422822e-05,
      "loss": 1.5459,
      "step": 403
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7288590604026847e-05,
      "loss": 1.5195,
      "step": 404
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7281879194630872e-05,
      "loss": 1.5137,
      "step": 405
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.72751677852349e-05,
      "loss": 1.5127,
      "step": 406
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7268456375838926e-05,
      "loss": 1.4805,
      "step": 407
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7261744966442954e-05,
      "loss": 1.5078,
      "step": 408
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.725503355704698e-05,
      "loss": 1.5283,
      "step": 409
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7248322147651008e-05,
      "loss": 1.5166,
      "step": 410
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7241610738255033e-05,
      "loss": 1.5303,
      "step": 411
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7234899328859062e-05,
      "loss": 1.5566,
      "step": 412
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7228187919463087e-05,
      "loss": 1.5303,
      "step": 413
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.7221476510067115e-05,
      "loss": 1.5332,
      "step": 414
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7214765100671144e-05,
      "loss": 1.5107,
      "step": 415
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.720805369127517e-05,
      "loss": 1.4844,
      "step": 416
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7201342281879198e-05,
      "loss": 1.4688,
      "step": 417
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7194630872483223e-05,
      "loss": 1.5088,
      "step": 418
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.718791946308725e-05,
      "loss": 1.5117,
      "step": 419
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.7181208053691277e-05,
      "loss": 1.4834,
      "step": 420
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7174496644295305e-05,
      "loss": 1.502,
      "step": 421
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.716778523489933e-05,
      "loss": 1.5293,
      "step": 422
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.716107382550336e-05,
      "loss": 1.5322,
      "step": 423
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7154362416107384e-05,
      "loss": 1.4775,
      "step": 424
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.714765100671141e-05,
      "loss": 1.5391,
      "step": 425
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.7140939597315438e-05,
      "loss": 1.5107,
      "step": 426
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7134228187919463e-05,
      "loss": 1.5264,
      "step": 427
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.712751677852349e-05,
      "loss": 1.5322,
      "step": 428
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7120805369127517e-05,
      "loss": 1.5273,
      "step": 429
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.7114093959731545e-05,
      "loss": 1.4883,
      "step": 430
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.710738255033557e-05,
      "loss": 1.5039,
      "step": 431
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.71006711409396e-05,
      "loss": 1.5215,
      "step": 432
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7093959731543627e-05,
      "loss": 1.5068,
      "step": 433
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7087248322147652e-05,
      "loss": 1.5186,
      "step": 434
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.708053691275168e-05,
      "loss": 1.5225,
      "step": 435
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7073825503355706e-05,
      "loss": 1.4824,
      "step": 436
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.7067114093959735e-05,
      "loss": 1.5449,
      "step": 437
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.706040268456376e-05,
      "loss": 1.5342,
      "step": 438
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.705369127516779e-05,
      "loss": 1.5176,
      "step": 439
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7046979865771814e-05,
      "loss": 1.542,
      "step": 440
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7040268456375842e-05,
      "loss": 1.4697,
      "step": 441
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7033557046979867e-05,
      "loss": 1.4873,
      "step": 442
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.7026845637583892e-05,
      "loss": 1.5039,
      "step": 443
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.702013422818792e-05,
      "loss": 1.4824,
      "step": 444
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7013422818791946e-05,
      "loss": 1.4854,
      "step": 445
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7006711409395975e-05,
      "loss": 1.5547,
      "step": 446
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.7e-05,
      "loss": 1.5449,
      "step": 447
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.699328859060403e-05,
      "loss": 1.501,
      "step": 448
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.6986577181208054e-05,
      "loss": 1.4893,
      "step": 449
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6979865771812082e-05,
      "loss": 1.5391,
      "step": 450
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6973154362416107e-05,
      "loss": 1.5449,
      "step": 451
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6966442953020136e-05,
      "loss": 1.4883,
      "step": 452
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6959731543624164e-05,
      "loss": 1.4844,
      "step": 453
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.695302013422819e-05,
      "loss": 1.4785,
      "step": 454
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.6946308724832218e-05,
      "loss": 1.4785,
      "step": 455
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6939597315436243e-05,
      "loss": 1.4854,
      "step": 456
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6932885906040272e-05,
      "loss": 1.5156,
      "step": 457
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6926174496644297e-05,
      "loss": 1.5117,
      "step": 458
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6919463087248325e-05,
      "loss": 1.5088,
      "step": 459
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.691275167785235e-05,
      "loss": 1.5137,
      "step": 460
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.6906040268456376e-05,
      "loss": 1.4902,
      "step": 461
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6899328859060404e-05,
      "loss": 1.5186,
      "step": 462
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.689261744966443e-05,
      "loss": 1.5039,
      "step": 463
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6885906040268458e-05,
      "loss": 1.5264,
      "step": 464
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6879194630872483e-05,
      "loss": 1.5234,
      "step": 465
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6872483221476512e-05,
      "loss": 1.5166,
      "step": 466
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.6865771812080537e-05,
      "loss": 1.5215,
      "step": 467
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6859060402684565e-05,
      "loss": 1.502,
      "step": 468
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.685234899328859e-05,
      "loss": 1.4756,
      "step": 469
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.684563758389262e-05,
      "loss": 1.4951,
      "step": 470
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6838926174496644e-05,
      "loss": 1.5098,
      "step": 471
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.6832214765100673e-05,
      "loss": 1.5156,
      "step": 472
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.68255033557047e-05,
      "loss": 1.5039,
      "step": 473
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6818791946308727e-05,
      "loss": 1.499,
      "step": 474
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6812080536912755e-05,
      "loss": 1.5068,
      "step": 475
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.680536912751678e-05,
      "loss": 1.4727,
      "step": 476
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.679865771812081e-05,
      "loss": 1.4961,
      "step": 477
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.6791946308724834e-05,
      "loss": 1.5098,
      "step": 478
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.678523489932886e-05,
      "loss": 1.501,
      "step": 479
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6778523489932888e-05,
      "loss": 1.5137,
      "step": 480
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6771812080536913e-05,
      "loss": 1.4844,
      "step": 481
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.676510067114094e-05,
      "loss": 1.4912,
      "step": 482
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6758389261744967e-05,
      "loss": 1.4805,
      "step": 483
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.6751677852348995e-05,
      "loss": 1.4717,
      "step": 484
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.674496644295302e-05,
      "loss": 1.4785,
      "step": 485
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.673825503355705e-05,
      "loss": 1.4814,
      "step": 486
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6731543624161074e-05,
      "loss": 1.4893,
      "step": 487
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6724832214765103e-05,
      "loss": 1.4873,
      "step": 488
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6718120805369128e-05,
      "loss": 1.501,
      "step": 489
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.6711409395973156e-05,
      "loss": 1.4814,
      "step": 490
    },
    {
      "epoch": 0.82,
      "learning_rate": 1.670469798657718e-05,
      "loss": 1.4824,
      "step": 491
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.669798657718121e-05,
      "loss": 1.4775,
      "step": 492
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.669127516778524e-05,
      "loss": 1.5049,
      "step": 493
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6684563758389264e-05,
      "loss": 1.4893,
      "step": 494
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6677852348993292e-05,
      "loss": 1.4971,
      "step": 495
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6671140939597317e-05,
      "loss": 1.4473,
      "step": 496
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.6664429530201343e-05,
      "loss": 1.4912,
      "step": 497
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.665771812080537e-05,
      "loss": 1.4766,
      "step": 498
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6651006711409396e-05,
      "loss": 1.4648,
      "step": 499
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6644295302013425e-05,
      "loss": 1.4834,
      "step": 500
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.663758389261745e-05,
      "loss": 1.4961,
      "step": 501
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.663087248322148e-05,
      "loss": 1.5107,
      "step": 502
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.6624161073825504e-05,
      "loss": 1.4971,
      "step": 503
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6617449664429532e-05,
      "loss": 1.4541,
      "step": 504
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6610738255033557e-05,
      "loss": 1.459,
      "step": 505
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6604026845637583e-05,
      "loss": 1.5039,
      "step": 506
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.659731543624161e-05,
      "loss": 1.5361,
      "step": 507
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.659060402684564e-05,
      "loss": 1.4707,
      "step": 508
    },
    {
      "epoch": 0.85,
      "learning_rate": 1.6583892617449665e-05,
      "loss": 1.5107,
      "step": 509
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6577181208053693e-05,
      "loss": 1.4648,
      "step": 510
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.657046979865772e-05,
      "loss": 1.4688,
      "step": 511
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6563758389261747e-05,
      "loss": 1.4824,
      "step": 512
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.6557046979865776e-05,
      "loss": 1.4814,
      "step": 513
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.65503355704698e-05,
      "loss": 1.5088,
      "step": 514
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.654362416107383e-05,
      "loss": 1.4873,
      "step": 515
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6536912751677854e-05,
      "loss": 1.4785,
      "step": 516
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.653020134228188e-05,
      "loss": 1.5049,
      "step": 517
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6523489932885908e-05,
      "loss": 1.4951,
      "step": 518
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6516778523489933e-05,
      "loss": 1.5146,
      "step": 519
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6510067114093962e-05,
      "loss": 1.4922,
      "step": 520
    },
    {
      "epoch": 0.87,
      "learning_rate": 1.6503355704697987e-05,
      "loss": 1.4834,
      "step": 521
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6496644295302016e-05,
      "loss": 1.5186,
      "step": 522
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.648993288590604e-05,
      "loss": 1.4834,
      "step": 523
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.648322147651007e-05,
      "loss": 1.4893,
      "step": 524
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6476510067114094e-05,
      "loss": 1.5342,
      "step": 525
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.646979865771812e-05,
      "loss": 1.5518,
      "step": 526
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.6463087248322148e-05,
      "loss": 1.4844,
      "step": 527
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6456375838926177e-05,
      "loss": 1.4883,
      "step": 528
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6449664429530202e-05,
      "loss": 1.4971,
      "step": 529
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.644295302013423e-05,
      "loss": 1.4521,
      "step": 530
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6436241610738256e-05,
      "loss": 1.4619,
      "step": 531
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6429530201342284e-05,
      "loss": 1.4629,
      "step": 532
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.6422818791946313e-05,
      "loss": 1.4932,
      "step": 533
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6416107382550338e-05,
      "loss": 1.4854,
      "step": 534
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6409395973154363e-05,
      "loss": 1.5,
      "step": 535
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.640268456375839e-05,
      "loss": 1.4717,
      "step": 536
    },
    {
      "epoch": 0.9,
      "learning_rate": 1.6395973154362417e-05,
      "loss": 1.4551,
      "step": 537
    },
| { |
| "epoch": 0.9, |
| "learning_rate": 1.6389261744966445e-05, |
| "loss": 1.4727, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 1.638255033557047e-05, |
| "loss": 1.4922, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.63758389261745e-05, |
| "loss": 1.46, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6369127516778524e-05, |
| "loss": 1.4824, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6362416107382553e-05, |
| "loss": 1.4951, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6355704697986578e-05, |
| "loss": 1.4873, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.6348993288590603e-05, |
| "loss": 1.4697, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 1.634228187919463e-05, |
| "loss": 1.4941, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.633557046979866e-05, |
| "loss": 1.4453, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6328859060402685e-05, |
| "loss": 1.4668, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6322147651006714e-05, |
| "loss": 1.4766, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.631543624161074e-05, |
| "loss": 1.4727, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6308724832214767e-05, |
| "loss": 1.4668, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 1.6302013422818796e-05, |
| "loss": 1.4824, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.629530201342282e-05, |
| "loss": 1.4932, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6288590604026846e-05, |
| "loss": 1.4316, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6281879194630875e-05, |
| "loss": 1.4824, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.62751677852349e-05, |
| "loss": 1.4814, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.626845637583893e-05, |
| "loss": 1.4639, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 1.6261744966442954e-05, |
| "loss": 1.5029, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6255033557046982e-05, |
| "loss": 1.46, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6248322147651007e-05, |
| "loss": 1.4814, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6241610738255036e-05, |
| "loss": 1.4717, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.623489932885906e-05, |
| "loss": 1.4531, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6228187919463086e-05, |
| "loss": 1.4609, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 1.6221476510067115e-05, |
| "loss": 1.4883, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.621476510067114e-05, |
| "loss": 1.4639, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.620805369127517e-05, |
| "loss": 1.4531, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.6201342281879197e-05, |
| "loss": 1.4756, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.6194630872483222e-05, |
| "loss": 1.4756, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.618791946308725e-05, |
| "loss": 1.502, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 1.6181208053691276e-05, |
| "loss": 1.4443, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6174496644295304e-05, |
| "loss": 1.4619, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.616778523489933e-05, |
| "loss": 1.4883, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6161073825503358e-05, |
| "loss": 1.4717, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6154362416107383e-05, |
| "loss": 1.4814, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6147651006711412e-05, |
| "loss": 1.4775, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 1.6140939597315437e-05, |
| "loss": 1.4531, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.6134228187919466e-05, |
| "loss": 1.4785, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.612751677852349e-05, |
| "loss": 1.4883, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.612080536912752e-05, |
| "loss": 1.4482, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.6114093959731544e-05, |
| "loss": 1.4717, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.610738255033557e-05, |
| "loss": 1.4443, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 1.6100671140939598e-05, |
| "loss": 1.4189, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6093959731543623e-05, |
| "loss": 1.4883, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6087248322147652e-05, |
| "loss": 1.4287, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6080536912751677e-05, |
| "loss": 1.4482, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6073825503355706e-05, |
| "loss": 1.4844, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.6067114093959734e-05, |
| "loss": 1.4863, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 1.606040268456376e-05, |
| "loss": 1.4268, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6053691275167788e-05, |
| "loss": 1.4834, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6046979865771813e-05, |
| "loss": 1.4795, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.604026845637584e-05, |
| "loss": 1.4561, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6033557046979867e-05, |
| "loss": 1.4756, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.6026845637583895e-05, |
| "loss": 1.4619, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 1.602013422818792e-05, |
| "loss": 1.4912, |
| "step": 593 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.601342281879195e-05, |
| "loss": 1.4395, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.6006711409395974e-05, |
| "loss": 1.4707, |
| "step": 595 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.6000000000000003e-05, |
| "loss": 1.4609, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5993288590604028e-05, |
| "loss": 1.4082, |
| "step": 597 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 1.5986577181208053e-05, |
| "loss": 1.4561, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.597986577181208e-05, |
| "loss": 1.4609, |
| "step": 599 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5973154362416107e-05, |
| "loss": 1.4355, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5966442953020135e-05, |
| "loss": 1.4502, |
| "step": 601 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.595973154362416e-05, |
| "loss": 1.4512, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.595302013422819e-05, |
| "loss": 1.458, |
| "step": 603 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 1.5946308724832214e-05, |
| "loss": 1.4502, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5939597315436243e-05, |
| "loss": 1.4434, |
| "step": 605 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.593288590604027e-05, |
| "loss": 1.4023, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5926174496644296e-05, |
| "loss": 1.4561, |
| "step": 607 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.5919463087248325e-05, |
| "loss": 1.4795, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.591275167785235e-05, |
| "loss": 1.4512, |
| "step": 609 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 1.590604026845638e-05, |
| "loss": 1.4668, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5899328859060404e-05, |
| "loss": 1.4648, |
| "step": 611 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5892617449664432e-05, |
| "loss": 1.4502, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5885906040268457e-05, |
| "loss": 1.4414, |
| "step": 613 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.5879194630872486e-05, |
| "loss": 1.4463, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.587248322147651e-05, |
| "loss": 1.4482, |
| "step": 615 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 1.586577181208054e-05, |
| "loss": 1.4209, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5859060402684565e-05, |
| "loss": 1.4639, |
| "step": 617 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.585234899328859e-05, |
| "loss": 1.4805, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.584563758389262e-05, |
| "loss": 1.459, |
| "step": 619 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5838926174496644e-05, |
| "loss": 1.4609, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5832214765100672e-05, |
| "loss": 1.4023, |
| "step": 621 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 1.5825503355704697e-05, |
| "loss": 1.4629, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5818791946308726e-05, |
| "loss": 1.4424, |
| "step": 623 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.581208053691275e-05, |
| "loss": 1.4395, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.580536912751678e-05, |
| "loss": 1.459, |
| "step": 625 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5798657718120808e-05, |
| "loss": 1.4756, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5791946308724833e-05, |
| "loss": 1.4492, |
| "step": 627 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 1.5785234899328862e-05, |
| "loss": 1.4277, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5778523489932887e-05, |
| "loss": 1.4834, |
| "step": 629 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5771812080536916e-05, |
| "loss": 1.4346, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.576510067114094e-05, |
| "loss": 1.4307, |
| "step": 631 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.575838926174497e-05, |
| "loss": 1.4287, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5751677852348995e-05, |
| "loss": 1.4248, |
| "step": 633 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 1.5744966442953023e-05, |
| "loss": 1.4287, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.5738255033557048e-05, |
| "loss": 1.4404, |
| "step": 635 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.5731543624161073e-05, |
| "loss": 1.4414, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.5724832214765102e-05, |
| "loss": 1.4043, |
| "step": 637 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.5718120805369127e-05, |
| "loss": 1.4316, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.5711409395973156e-05, |
| "loss": 1.4111, |
| "step": 639 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 1.570469798657718e-05, |
| "loss": 1.4805, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.569798657718121e-05, |
| "loss": 1.4727, |
| "step": 641 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.5691275167785235e-05, |
| "loss": 1.4795, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.5684563758389263e-05, |
| "loss": 1.4385, |
| "step": 643 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.5677852348993288e-05, |
| "loss": 1.4668, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.5671140939597317e-05, |
| "loss": 1.4541, |
| "step": 645 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 1.5664429530201345e-05, |
| "loss": 1.458, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.565771812080537e-05, |
| "loss": 1.4424, |
| "step": 647 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.56510067114094e-05, |
| "loss": 1.4287, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.5644295302013424e-05, |
| "loss": 1.4531, |
| "step": 649 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.5637583892617453e-05, |
| "loss": 1.4463, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.5630872483221478e-05, |
| "loss": 1.4355, |
| "step": 651 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 1.5624161073825506e-05, |
| "loss": 1.4092, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.561744966442953e-05, |
| "loss": 1.4473, |
| "step": 653 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.5610738255033557e-05, |
| "loss": 1.458, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.5604026845637585e-05, |
| "loss": 1.4482, |
| "step": 655 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.559731543624161e-05, |
| "loss": 1.4219, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.559060402684564e-05, |
| "loss": 1.4707, |
| "step": 657 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 1.5583892617449664e-05, |
| "loss": 1.4297, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.5577181208053693e-05, |
| "loss": 1.4619, |
| "step": 659 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.5570469798657718e-05, |
| "loss": 1.4307, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.5563758389261746e-05, |
| "loss": 1.457, |
| "step": 661 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.555704697986577e-05, |
| "loss": 1.418, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.55503355704698e-05, |
| "loss": 1.4629, |
| "step": 663 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 1.554362416107383e-05, |
| "loss": 1.4502, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.5536912751677854e-05, |
| "loss": 1.4541, |
| "step": 665 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.5530201342281882e-05, |
| "loss": 1.4404, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.5523489932885908e-05, |
| "loss": 1.4365, |
| "step": 667 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.5516778523489936e-05, |
| "loss": 1.4111, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.551006711409396e-05, |
| "loss": 1.4443, |
| "step": 669 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 1.550335570469799e-05, |
| "loss": 1.4346, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.5496644295302015e-05, |
| "loss": 1.4443, |
| "step": 671 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.548993288590604e-05, |
| "loss": 1.4111, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.548322147651007e-05, |
| "loss": 1.4531, |
| "step": 673 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.5476510067114094e-05, |
| "loss": 1.4316, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.5469798657718122e-05, |
| "loss": 1.4131, |
| "step": 675 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 1.5463087248322148e-05, |
| "loss": 1.4365, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.5456375838926176e-05, |
| "loss": 1.458, |
| "step": 677 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.54496644295302e-05, |
| "loss": 1.4688, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.544295302013423e-05, |
| "loss": 1.458, |
| "step": 679 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.5436241610738255e-05, |
| "loss": 1.3867, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.5429530201342283e-05, |
| "loss": 1.3994, |
| "step": 681 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 1.542281879194631e-05, |
| "loss": 1.4121, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.5416107382550337e-05, |
| "loss": 1.418, |
| "step": 683 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.5409395973154366e-05, |
| "loss": 1.4287, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.540268456375839e-05, |
| "loss": 1.4482, |
| "step": 685 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.539597315436242e-05, |
| "loss": 1.3604, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.5389261744966445e-05, |
| "loss": 1.3945, |
| "step": 687 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 1.5382550335570473e-05, |
| "loss": 1.4287, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.5375838926174498e-05, |
| "loss": 1.4834, |
| "step": 689 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.5369127516778523e-05, |
| "loss": 1.4316, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.5362416107382552e-05, |
| "loss": 1.4277, |
| "step": 691 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.5355704697986577e-05, |
| "loss": 1.418, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.5348993288590606e-05, |
| "loss": 1.4248, |
| "step": 693 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 1.534228187919463e-05, |
| "loss": 1.4463, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.533557046979866e-05, |
| "loss": 1.457, |
| "step": 695 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.5328859060402685e-05, |
| "loss": 1.4131, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.5322147651006713e-05, |
| "loss": 1.4443, |
| "step": 697 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.5315436241610738e-05, |
| "loss": 1.4141, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.5308724832214767e-05, |
| "loss": 1.4414, |
| "step": 699 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 1.5302013422818792e-05, |
| "loss": 1.3906, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.529530201342282e-05, |
| "loss": 1.4248, |
| "step": 701 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.5288590604026846e-05, |
| "loss": 1.4316, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.5281879194630874e-05, |
| "loss": 1.3867, |
| "step": 703 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.5275167785234903e-05, |
| "loss": 1.4395, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.5268456375838928e-05, |
| "loss": 1.4092, |
| "step": 705 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 1.5261744966442956e-05, |
| "loss": 1.4434, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.5255033557046982e-05, |
| "loss": 1.4482, |
| "step": 707 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.5248322147651009e-05, |
| "loss": 1.4619, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.5241610738255035e-05, |
| "loss": 1.4141, |
| "step": 709 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.523489932885906e-05, |
| "loss": 1.457, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.5228187919463089e-05, |
| "loss": 1.4238, |
| "step": 711 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 1.5221476510067114e-05, |
| "loss": 1.4434, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.5214765100671143e-05, |
| "loss": 1.415, |
| "step": 713 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.5208053691275168e-05, |
| "loss": 1.46, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.5201342281879196e-05, |
| "loss": 1.4326, |
| "step": 715 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.5194630872483223e-05, |
| "loss": 1.415, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.518791946308725e-05, |
| "loss": 1.4189, |
| "step": 717 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 1.5181208053691277e-05, |
| "loss": 1.4268, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.5174496644295302e-05, |
| "loss": 1.4141, |
| "step": 719 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.516778523489933e-05, |
| "loss": 1.3965, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.5161073825503356e-05, |
| "loss": 1.4639, |
| "step": 721 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.5154362416107384e-05, |
| "loss": 1.415, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.514765100671141e-05, |
| "loss": 1.4023, |
| "step": 723 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 1.5140939597315438e-05, |
| "loss": 1.4473, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5134228187919463e-05, |
| "loss": 1.4248, |
| "step": 725 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5127516778523492e-05, |
| "loss": 1.4141, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5120805369127519e-05, |
| "loss": 1.4434, |
| "step": 727 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5114093959731544e-05, |
| "loss": 1.4131, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5107382550335572e-05, |
| "loss": 1.458, |
| "step": 729 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 1.5100671140939598e-05, |
| "loss": 1.4355, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.5093959731543626e-05, |
| "loss": 1.4141, |
| "step": 731 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.5087248322147651e-05, |
| "loss": 1.417, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.508053691275168e-05, |
| "loss": 1.4346, |
| "step": 733 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.5073825503355705e-05, |
| "loss": 1.4209, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.5067114093959734e-05, |
| "loss": 1.4258, |
| "step": 735 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 1.506040268456376e-05, |
| "loss": 1.4121, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5053691275167786e-05, |
| "loss": 1.4307, |
| "step": 737 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5046979865771814e-05, |
| "loss": 1.4326, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.504026845637584e-05, |
| "loss": 1.4121, |
| "step": 739 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5033557046979868e-05, |
| "loss": 1.4385, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5026845637583893e-05, |
| "loss": 1.4619, |
| "step": 741 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 1.5020134228187922e-05, |
| "loss": 1.4199, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.5013422818791947e-05, |
| "loss": 1.4541, |
| "step": 743 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.5006711409395975e-05, |
| "loss": 1.3936, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.5000000000000002e-05, |
| "loss": 1.415, |
| "step": 745 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.4993288590604027e-05, |
| "loss": 1.4238, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 1.4986577181208056e-05, |
| "loss": 1.4268, |
| "step": 747 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.4979865771812081e-05, |
| "loss": 1.4453, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.497315436241611e-05, |
| "loss": 1.4092, |
| "step": 749 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.4966442953020135e-05, |
| "loss": 1.4678, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.4959731543624163e-05, |
| "loss": 1.4248, |
| "step": 751 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.4953020134228188e-05, |
| "loss": 1.4521, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 1.4946308724832217e-05, |
| "loss": 1.4258, |
| "step": 753 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4939597315436242e-05, |
| "loss": 1.418, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4932885906040269e-05, |
| "loss": 1.4219, |
| "step": 755 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4926174496644297e-05, |
| "loss": 1.416, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4919463087248323e-05, |
| "loss": 1.4482, |
| "step": 757 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4912751677852351e-05, |
| "loss": 1.4072, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 1.4906040268456376e-05, |
| "loss": 1.4287, |
| "step": 759 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.4899328859060405e-05, |
| "loss": 1.3936, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.489261744966443e-05, |
| "loss": 1.4043, |
| "step": 761 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.4885906040268459e-05, |
| "loss": 1.4502, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.4879194630872484e-05, |
| "loss": 1.4209, |
| "step": 763 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.487248322147651e-05, |
| "loss": 1.3799, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 1.4865771812080539e-05, |
| "loss": 1.4209, |
| "step": 765 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.4859060402684564e-05, |
| "loss": 1.4336, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.4852348993288593e-05, |
| "loss": 1.4414, |
| "step": 767 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.4845637583892618e-05, |
| "loss": 1.4453, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.4838926174496647e-05, |
| "loss": 1.4121, |
| "step": 769 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.4832214765100672e-05, |
| "loss": 1.4248, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 1.48255033557047e-05, |
| "loss": 1.4277, |
| "step": 771 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.4818791946308725e-05, |
| "loss": 1.4248, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.4812080536912752e-05, |
| "loss": 1.4033, |
| "step": 773 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.4805369127516779e-05, |
| "loss": 1.4121, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.4798657718120806e-05, |
| "loss": 1.4336, |
| "step": 775 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.4791946308724835e-05, |
| "loss": 1.4561, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 1.478523489932886e-05, |
| "loss": 1.4033, |
| "step": 777 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.4778523489932888e-05, |
| "loss": 1.417, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.4771812080536913e-05, |
| "loss": 1.3955, |
| "step": 779 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.4765100671140942e-05, |
| "loss": 1.3994, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.4758389261744967e-05, |
| "loss": 1.4092, |
| "step": 781 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.4751677852348994e-05, |
| "loss": 1.4023, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 1.474496644295302e-05, |
| "loss": 1.4229, |
| "step": 783 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.4738255033557048e-05, |
| "loss": 1.4268, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.4731543624161076e-05, |
| "loss": 1.4307, |
| "step": 785 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.4724832214765101e-05, |
| "loss": 1.4404, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.471812080536913e-05, |
| "loss": 1.375, |
| "step": 787 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.4711409395973155e-05, |
| "loss": 1.4238, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 1.4704697986577184e-05, |
| "loss": 1.4346, |
| "step": 789 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.4697986577181209e-05, |
| "loss": 1.3721, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.4691275167785236e-05, |
| "loss": 1.418, |
| "step": 791 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.4684563758389262e-05, |
| "loss": 1.458, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.467785234899329e-05, |
| "loss": 1.4121, |
| "step": 793 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.4671140939597316e-05, |
| "loss": 1.3936, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 1.4664429530201343e-05, |
| "loss": 1.4131, |
| "step": 795 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.4657718120805372e-05, |
| "loss": 1.4014, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.4651006711409397e-05, |
| "loss": 1.4424, |
| "step": 797 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.4644295302013425e-05, |
| "loss": 1.4258, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.463758389261745e-05, |
| "loss": 1.4082, |
| "step": 799 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.4630872483221479e-05, |
| "loss": 1.4033, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 1.4624161073825504e-05, |
| "loss": 1.4316, |
| "step": 801 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4617449664429531e-05, |
| "loss": 1.4336, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4610738255033558e-05, |
| "loss": 1.4033, |
| "step": 803 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4604026845637585e-05, |
| "loss": 1.417, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4597315436241613e-05, |
| "loss": 1.3975, |
| "step": 805 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4590604026845638e-05, |
| "loss": 1.4082, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 1.4583892617449667e-05, |
| "loss": 1.3926, |
| "step": 807 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.4577181208053692e-05, |
| "loss": 1.4502, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.457046979865772e-05, |
| "loss": 1.3613, |
| "step": 809 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.4563758389261746e-05, |
| "loss": 1.3828, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.4557046979865773e-05, |
| "loss": 1.4258, |
| "step": 811 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.45503355704698e-05, |
| "loss": 1.417, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 1.4543624161073826e-05, |
| "loss": 1.4014, |
| "step": 813 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.4536912751677855e-05, |
| "loss": 1.418, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.453020134228188e-05, |
| "loss": 1.3965, |
| "step": 815 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.4523489932885909e-05, |
| "loss": 1.4111, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.4516778523489934e-05, |
| "loss": 1.4434, |
| "step": 817 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.4510067114093962e-05, |
| "loss": 1.4355, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 1.4503355704697988e-05, |
| "loss": 1.3965, |
| "step": 819 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.4496644295302014e-05, |
| "loss": 1.3848, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.4489932885906041e-05, |
| "loss": 1.418, |
| "step": 821 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.4483221476510068e-05, |
| "loss": 1.4131, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.4476510067114095e-05, |
| "loss": 1.4443, |
| "step": 823 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.4469798657718122e-05, |
| "loss": 1.416, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 1.446308724832215e-05, |
| "loss": 1.3955, |
| "step": 825 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.4456375838926175e-05, |
| "loss": 1.4297, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.4449664429530204e-05, |
| "loss": 1.3799, |
| "step": 827 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.444295302013423e-05, |
| "loss": 1.4199, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.4436241610738256e-05, |
| "loss": 1.3926, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.4429530201342283e-05, |
| "loss": 1.4297, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 1.442281879194631e-05, |
| "loss": 1.4121, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4416107382550337e-05, |
| "loss": 1.4453, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4409395973154363e-05, |
| "loss": 1.4209, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4402684563758392e-05, |
| "loss": 1.4336, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4395973154362417e-05, |
| "loss": 1.3965, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4389261744966446e-05, |
| "loss": 1.4053, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 1.4382550335570471e-05, |
| "loss": 1.4141, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4375838926174496e-05, |
| "loss": 1.4092, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4369127516778525e-05, |
| "loss": 1.4082, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4362416107382551e-05, |
| "loss": 1.4131, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4355704697986578e-05, |
| "loss": 1.4277, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4348993288590605e-05, |
| "loss": 1.4023, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 1.4342281879194632e-05, |
| "loss": 1.4297, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4335570469798659e-05, |
| "loss": 1.4111, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4328859060402687e-05, |
| "loss": 1.3955, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4322147651006713e-05, |
| "loss": 1.4219, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4315436241610738e-05, |
| "loss": 1.4082, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4308724832214766e-05, |
| "loss": 1.3926, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 1.4302013422818793e-05, |
| "loss": 1.4277, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.429530201342282e-05, |
| "loss": 1.3867, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.4288590604026847e-05, |
| "loss": 1.4043, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.4281879194630874e-05, |
| "loss": 1.4131, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.42751677852349e-05, |
| "loss": 1.4297, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.4268456375838929e-05, |
| "loss": 1.3887, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 1.4261744966442954e-05, |
| "loss": 1.4238, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.425503355704698e-05, |
| "loss": 1.4082, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.4248322147651008e-05, |
| "loss": 1.4355, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.4241610738255035e-05, |
| "loss": 1.418, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.4234899328859062e-05, |
| "loss": 1.4189, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.4228187919463088e-05, |
| "loss": 1.418, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 1.4221476510067115e-05, |
| "loss": 1.4004, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4214765100671142e-05, |
| "loss": 1.4072, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4208053691275169e-05, |
| "loss": 1.3945, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4201342281879196e-05, |
| "loss": 1.4033, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4194630872483221e-05, |
| "loss": 1.3945, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.418791946308725e-05, |
| "loss": 1.3779, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 1.4181208053691275e-05, |
| "loss": 1.418, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.4174496644295303e-05, |
| "loss": 1.415, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.416778523489933e-05, |
| "loss": 1.3916, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.4161073825503357e-05, |
| "loss": 1.3975, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.4154362416107384e-05, |
| "loss": 1.4336, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.414765100671141e-05, |
| "loss": 1.4102, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 1.4140939597315438e-05, |
| "loss": 1.3887, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4134228187919463e-05, |
| "loss": 1.3887, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4127516778523491e-05, |
| "loss": 1.4541, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4120805369127516e-05, |
| "loss": 1.415, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4114093959731545e-05, |
| "loss": 1.3779, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4107382550335572e-05, |
| "loss": 1.4082, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 1.4100671140939599e-05, |
| "loss": 1.4258, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4093959731543626e-05, |
| "loss": 1.3877, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4087248322147652e-05, |
| "loss": 1.3877, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.408053691275168e-05, |
| "loss": 1.376, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4073825503355704e-05, |
| "loss": 1.3994, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4067114093959733e-05, |
| "loss": 1.3965, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 1.4060402684563758e-05, |
| "loss": 1.4092, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4053691275167787e-05, |
| "loss": 1.3955, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4046979865771812e-05, |
| "loss": 1.3828, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.404026845637584e-05, |
| "loss": 1.3809, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4033557046979867e-05, |
| "loss": 1.3838, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4026845637583894e-05, |
| "loss": 1.3916, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 1.4020134228187921e-05, |
| "loss": 1.3848, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.4013422818791948e-05, |
| "loss": 1.4014, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.4006711409395975e-05, |
| "loss": 1.415, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.4e-05, |
| "loss": 1.4189, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.3993288590604028e-05, |
| "loss": 1.3877, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 1.3986577181208053e-05, |
| "loss": 1.4219, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.3979865771812082e-05, |
| "loss": 1.3896, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.3973154362416109e-05, |
| "loss": 1.3975, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.3966442953020136e-05, |
| "loss": 1.3652, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.3959731543624163e-05, |
| "loss": 1.3848, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.395302013422819e-05, |
| "loss": 1.3945, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 1.3946308724832216e-05, |
| "loss": 1.3965, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.3939597315436241e-05, |
| "loss": 1.3984, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.393288590604027e-05, |
| "loss": 1.3672, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.3926174496644295e-05, |
| "loss": 1.3877, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.3919463087248324e-05, |
| "loss": 1.4404, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.3912751677852349e-05, |
| "loss": 1.3809, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 1.3906040268456377e-05, |
| "loss": 1.415, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3899328859060404e-05, |
| "loss": 1.4082, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3892617449664431e-05, |
| "loss": 1.3945, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3885906040268458e-05, |
| "loss": 1.3701, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3879194630872483e-05, |
| "loss": 1.3906, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3872483221476512e-05, |
| "loss": 1.4033, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 1.3865771812080537e-05, |
| "loss": 1.3818, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.3859060402684565e-05, |
| "loss": 1.4277, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.385234899328859e-05, |
| "loss": 1.3975, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.3845637583892619e-05, |
| "loss": 1.4375, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.3838926174496646e-05, |
| "loss": 1.3955, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.3832214765100673e-05, |
| "loss": 1.3867, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 1.38255033557047e-05, |
| "loss": 1.4463, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.3818791946308725e-05, |
| "loss": 1.3545, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.3812080536912753e-05, |
| "loss": 1.3652, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.3805369127516779e-05, |
| "loss": 1.3623, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.3798657718120807e-05, |
| "loss": 1.4053, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.3791946308724832e-05, |
| "loss": 1.3965, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 1.378523489932886e-05, |
| "loss": 1.3877, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.3778523489932888e-05, |
| "loss": 1.3975, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.3771812080536914e-05, |
| "loss": 1.374, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.3765100671140941e-05, |
| "loss": 1.4102, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.3758389261744966e-05, |
| "loss": 1.4092, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.3751677852348995e-05, |
| "loss": 1.4268, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 1.374496644295302e-05, |
| "loss": 1.3623, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3738255033557049e-05, |
| "loss": 1.3809, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3731543624161074e-05, |
| "loss": 1.3867, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3724832214765102e-05, |
| "loss": 1.3672, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3718120805369128e-05, |
| "loss": 1.4043, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3711409395973156e-05, |
| "loss": 1.4268, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 1.3704697986577183e-05, |
| "loss": 1.377, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.3697986577181208e-05, |
| "loss": 1.3652, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.3691275167785237e-05, |
| "loss": 1.3818, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.3684563758389262e-05, |
| "loss": 1.4092, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.367785234899329e-05, |
| "loss": 1.4014, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.3671140939597316e-05, |
| "loss": 1.3818, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 1.3664429530201344e-05, |
| "loss": 1.3984, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.365771812080537e-05, |
| "loss": 1.3916, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.3651006711409398e-05, |
| "loss": 1.3887, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.3644295302013425e-05, |
| "loss": 1.418, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.363758389261745e-05, |
| "loss": 1.4033, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.3630872483221478e-05, |
| "loss": 1.3535, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 1.3624161073825504e-05, |
| "loss": 1.3848, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.3617449664429532e-05, |
| "loss": 1.417, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.3610738255033557e-05, |
| "loss": 1.4053, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.3604026845637586e-05, |
| "loss": 1.4082, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.3597315436241611e-05, |
| "loss": 1.4062, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.359060402684564e-05, |
| "loss": 1.3887, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 1.3583892617449665e-05, |
| "loss": 1.3975, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.3577181208053692e-05, |
| "loss": 1.3984, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.357046979865772e-05, |
| "loss": 1.4082, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.3563758389261745e-05, |
| "loss": 1.3994, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.3557046979865774e-05, |
| "loss": 1.3984, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.3550335570469799e-05, |
| "loss": 1.4033, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 1.3543624161073827e-05, |
| "loss": 1.3838, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3536912751677853e-05, |
| "loss": 1.3691, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3530201342281881e-05, |
| "loss": 1.3809, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3523489932885906e-05, |
| "loss": 1.3926, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3516778523489933e-05, |
| "loss": 1.3604, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3510067114093962e-05, |
| "loss": 1.3584, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 1.3503355704697987e-05, |
| "loss": 1.3828, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.3496644295302015e-05, |
| "loss": 1.4248, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.348993288590604e-05, |
| "loss": 1.373, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.348322147651007e-05, |
| "loss": 1.3838, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.3476510067114094e-05, |
| "loss": 1.3887, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.3469798657718123e-05, |
| "loss": 1.4072, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 1.3463087248322148e-05, |
| "loss": 1.3701, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.3456375838926175e-05, |
| "loss": 1.3887, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.3449664429530202e-05, |
| "loss": 1.3945, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.3442953020134229e-05, |
| "loss": 1.3926, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.3436241610738257e-05, |
| "loss": 1.3848, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.3429530201342282e-05, |
| "loss": 1.3613, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 1.342281879194631e-05, |
| "loss": 1.374, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.3416107382550336e-05, |
| "loss": 1.373, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.3409395973154365e-05, |
| "loss": 1.3965, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.340268456375839e-05, |
| "loss": 1.4326, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.3395973154362418e-05, |
| "loss": 1.4043, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.3389261744966443e-05, |
| "loss": 1.4033, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 1.338255033557047e-05, |
| "loss": 1.3809, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3375838926174499e-05, |
| "loss": 1.3799, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3369127516778524e-05, |
| "loss": 1.3994, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3362416107382553e-05, |
| "loss": 1.3525, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3355704697986578e-05, |
| "loss": 1.4326, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3348993288590606e-05, |
| "loss": 1.3936, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 1.3342281879194631e-05, |
| "loss": 1.4043, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.333557046979866e-05, |
| "loss": 1.4316, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.3328859060402685e-05, |
| "loss": 1.3867, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.3322147651006712e-05, |
| "loss": 1.3955, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.331543624161074e-05, |
| "loss": 1.3516, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.3308724832214766e-05, |
| "loss": 1.3906, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 1.3302013422818794e-05, |
| "loss": 1.3828, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.329530201342282e-05, |
| "loss": 1.3994, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3288590604026848e-05, |
| "loss": 1.3867, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3281879194630873e-05, |
| "loss": 1.4258, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3275167785234902e-05, |
| "loss": 1.4053, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3268456375838927e-05, |
| "loss": 1.3906, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 1.3261744966442954e-05, |
| "loss": 1.3809, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.325503355704698e-05, |
| "loss": 1.3574, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.3248322147651007e-05, |
| "loss": 1.416, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.3241610738255036e-05, |
| "loss": 1.3516, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.3234899328859061e-05, |
| "loss": 1.3818, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.322818791946309e-05, |
| "loss": 1.4062, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 1.3221476510067115e-05, |
| "loss": 1.3691, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3214765100671143e-05, |
| "loss": 1.373, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3208053691275168e-05, |
| "loss": 1.3857, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3201342281879195e-05, |
| "loss": 1.3711, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3194630872483222e-05, |
| "loss": 1.3887, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3187919463087249e-05, |
| "loss": 1.3467, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 1.3181208053691278e-05, |
| "loss": 1.4092, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.3174496644295303e-05, |
| "loss": 1.3926, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.3167785234899331e-05, |
| "loss": 1.4004, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.3161073825503356e-05, |
| "loss": 1.3682, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.3154362416107385e-05, |
| "loss": 1.4062, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.314765100671141e-05, |
| "loss": 1.376, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 1.3140939597315437e-05, |
| "loss": 1.3877, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3134228187919464e-05, |
| "loss": 1.3867, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.312751677852349e-05, |
| "loss": 1.3525, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3120805369127518e-05, |
| "loss": 1.3535, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3114093959731544e-05, |
| "loss": 1.3633, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3107382550335573e-05, |
| "loss": 1.4023, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 1.3100671140939598e-05, |
| "loss": 1.3633, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3093959731543627e-05, |
| "loss": 1.3857, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3087248322147652e-05, |
| "loss": 1.3682, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3080536912751679e-05, |
| "loss": 1.3604, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3073825503355706e-05, |
| "loss": 1.3291, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.3067114093959732e-05, |
| "loss": 1.377, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 1.306040268456376e-05, |
| "loss": 1.373, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.3053691275167786e-05, |
| "loss": 1.3438, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.3046979865771815e-05, |
| "loss": 1.4082, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.304026845637584e-05, |
| "loss": 1.3916, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.3033557046979868e-05, |
| "loss": 1.3818, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.3026845637583893e-05, |
| "loss": 1.3936, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 1.302013422818792e-05, |
| "loss": 1.3574, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.3013422818791947e-05, |
| "loss": 1.3896, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.3006711409395974e-05, |
| "loss": 1.375, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.3000000000000001e-05, |
| "loss": 1.4268, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.2993288590604028e-05, |
| "loss": 1.3623, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 1.2986577181208055e-05, |
| "loss": 1.4199, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.2979865771812081e-05, |
| "loss": 1.3682, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.297315436241611e-05, |
| "loss": 1.3389, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.2966442953020135e-05, |
| "loss": 1.3643, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.295973154362416e-05, |
| "loss": 1.3555, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.2953020134228189e-05, |
| "loss": 1.3721, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 1.2946308724832216e-05, |
| "loss": 1.3525, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.2939597315436243e-05, |
| "loss": 1.3789, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.293288590604027e-05, |
| "loss": 1.3906, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.2926174496644296e-05, |
| "loss": 1.3867, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.2919463087248323e-05, |
| "loss": 1.3936, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.2912751677852352e-05, |
| "loss": 1.3906, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 1.2906040268456377e-05, |
| "loss": 1.3613, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.2899328859060402e-05, |
| "loss": 1.3965, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.289261744966443e-05, |
| "loss": 1.3496, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.2885906040268457e-05, |
| "loss": 1.3672, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.2879194630872484e-05, |
| "loss": 1.3564, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.2872483221476511e-05, |
| "loss": 1.3662, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 1.2865771812080538e-05, |
| "loss": 1.3711, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2859060402684565e-05, |
| "loss": 1.373, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2852348993288593e-05, |
| "loss": 1.3818, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2845637583892619e-05, |
| "loss": 1.3389, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2838926174496644e-05, |
| "loss": 1.3643, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2832214765100672e-05, |
| "loss": 1.3662, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 1.2825503355704697e-05, |
| "loss": 1.3672, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.2818791946308726e-05, |
| "loss": 1.4082, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.2812080536912753e-05, |
| "loss": 1.3965, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.280536912751678e-05, |
| "loss": 1.3916, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.2798657718120806e-05, |
| "loss": 1.4121, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.2791946308724833e-05, |
| "loss": 1.374, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 1.278523489932886e-05, |
| "loss": 1.3809, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2778523489932889e-05, |
| "loss": 1.3926, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2771812080536914e-05, |
| "loss": 1.3525, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2765100671140939e-05, |
| "loss": 1.3477, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2758389261744968e-05, |
| "loss": 1.3652, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2751677852348994e-05, |
| "loss": 1.3682, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 1.2744966442953021e-05, |
| "loss": 1.4102, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.2738255033557048e-05, |
| "loss": 1.3779, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.2731543624161075e-05, |
| "loss": 1.3906, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.2724832214765102e-05, |
| "loss": 1.3857, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.271812080536913e-05, |
| "loss": 1.3623, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.2711409395973156e-05, |
| "loss": 1.375, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 1.270469798657718e-05, |
| "loss": 1.3516, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.269798657718121e-05, |
| "loss": 1.3457, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.2691275167785234e-05, |
| "loss": 1.3984, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.2684563758389263e-05, |
| "loss": 1.3945, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.267785234899329e-05, |
| "loss": 1.3916, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.2671140939597317e-05, |
| "loss": 1.3711, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 1.2664429530201344e-05, |
| "loss": 1.3721, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.265771812080537e-05, |
| "loss": 1.3965, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.2651006711409397e-05, |
| "loss": 1.3564, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.2644295302013422e-05, |
| "loss": 1.375, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.2637583892617451e-05, |
| "loss": 1.3779, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.2630872483221476e-05, |
| "loss": 1.374, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 1.2624161073825505e-05, |
| "loss": 1.3809, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2617449664429532e-05, |
| "loss": 1.3975, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2610738255033558e-05, |
| "loss": 1.4033, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2604026845637585e-05, |
| "loss": 1.3467, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2597315436241612e-05, |
| "loss": 1.3936, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2590604026845639e-05, |
| "loss": 1.3906, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 1.2583892617449664e-05, |
| "loss": 1.4062, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.2577181208053693e-05, |
| "loss": 1.3906, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.2570469798657718e-05, |
| "loss": 1.3906, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.2563758389261746e-05, |
| "loss": 1.3477, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.2557046979865773e-05, |
| "loss": 1.3477, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.25503355704698e-05, |
| "loss": 1.3955, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 1.2543624161073827e-05, |
| "loss": 1.3701, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.2536912751677854e-05, |
| "loss": 1.3516, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.253020134228188e-05, |
| "loss": 1.3701, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.2523489932885906e-05, |
| "loss": 1.3662, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.2516778523489934e-05, |
| "loss": 1.3643, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.251006711409396e-05, |
| "loss": 1.3955, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 1.2503355704697988e-05, |
| "loss": 1.3994, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2496644295302013e-05, |
| "loss": 1.3701, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2489932885906042e-05, |
| "loss": 1.3633, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2483221476510069e-05, |
| "loss": 1.3682, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2476510067114095e-05, |
| "loss": 1.373, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2469798657718122e-05, |
| "loss": 1.3984, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 1.2463087248322147e-05, |
| "loss": 1.377, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.2456375838926176e-05, |
| "loss": 1.3672, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.2449664429530201e-05, |
| "loss": 1.3506, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.244295302013423e-05, |
| "loss": 1.3408, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.2436241610738255e-05, |
| "loss": 1.332, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.2429530201342283e-05, |
| "loss": 1.3535, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 1.242281879194631e-05, |
| "loss": 1.3447, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2416107382550337e-05, |
| "loss": 1.3535, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2409395973154364e-05, |
| "loss": 1.3955, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2402684563758389e-05, |
| "loss": 1.3867, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2395973154362418e-05, |
| "loss": 1.3613, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2389261744966443e-05, |
| "loss": 1.375, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 1.2382550335570471e-05, |
| "loss": 1.3701, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.2375838926174497e-05, |
| "loss": 1.3799, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.2369127516778525e-05, |
| "loss": 1.3672, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.236241610738255e-05, |
| "loss": 1.3711, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.2355704697986579e-05, |
| "loss": 1.3232, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.2348993288590606e-05, |
| "loss": 1.3496, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 1.234228187919463e-05, |
| "loss": 1.3809, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.233557046979866e-05, |
| "loss": 1.3828, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2328859060402685e-05, |
| "loss": 1.374, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2322147651006713e-05, |
| "loss": 1.3369, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2315436241610738e-05, |
| "loss": 1.3164, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2308724832214767e-05, |
| "loss": 1.3711, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 1.2302013422818792e-05, |
| "loss": 1.3584, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.229530201342282e-05, |
| "loss": 1.3994, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2288590604026847e-05, |
| "loss": 1.3545, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2281879194630872e-05, |
| "loss": 1.3613, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2275167785234901e-05, |
| "loss": 1.3623, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2268456375838926e-05, |
| "loss": 1.3457, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 1.2261744966442955e-05, |
| "loss": 1.3643, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.225503355704698e-05, |
| "loss": 1.3633, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2248322147651008e-05, |
| "loss": 1.3906, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2241610738255034e-05, |
| "loss": 1.3828, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2234899328859062e-05, |
| "loss": 1.377, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2228187919463087e-05, |
| "loss": 1.4004, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 1.2221476510067114e-05, |
| "loss": 1.3457, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.2214765100671143e-05, |
| "loss": 1.3506, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.2208053691275168e-05, |
| "loss": 1.3867, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.2201342281879196e-05, |
| "loss": 1.3408, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.2194630872483222e-05, |
| "loss": 1.3857, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.218791946308725e-05, |
| "loss": 1.3447, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 1.2181208053691275e-05, |
| "loss": 1.3799, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2174496644295304e-05, |
| "loss": 1.3828, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2167785234899329e-05, |
| "loss": 1.3213, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2161073825503358e-05, |
| "loss": 1.3613, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2154362416107384e-05, |
| "loss": 1.3516, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.214765100671141e-05, |
| "loss": 1.3809, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 1.2140939597315438e-05, |
| "loss": 1.4053, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.2134228187919463e-05, |
| "loss": 1.3398, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.2127516778523492e-05, |
| "loss": 1.3896, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.2120805369127517e-05, |
| "loss": 1.3672, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.2114093959731545e-05, |
| "loss": 1.3389, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.210738255033557e-05, |
| "loss": 1.3301, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 1.21006711409396e-05, |
| "loss": 1.3936, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2093959731543626e-05, |
| "loss": 1.377, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2087248322147651e-05, |
| "loss": 1.3799, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.208053691275168e-05, |
| "loss": 1.3955, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2073825503355705e-05, |
| "loss": 1.3682, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2067114093959733e-05, |
| "loss": 1.3623, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 1.2060402684563759e-05, |
| "loss": 1.3809, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2053691275167787e-05, |
| "loss": 1.3525, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2046979865771812e-05, |
| "loss": 1.3555, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2040268456375841e-05, |
| "loss": 1.376, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2033557046979866e-05, |
| "loss": 1.3789, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2026845637583893e-05, |
| "loss": 1.3643, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 1.2020134228187921e-05, |
| "loss": 1.3389, |
| "step": 1189 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 1.2013422818791947e-05, |
| "loss": 1.377, |
| "step": 1190 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 1.2006711409395975e-05, |
| "loss": 1.3467, |
| "step": 1191 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 1.2e-05, |
| "loss": 1.3428, |
| "step": 1192 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 1.1993288590604029e-05, |
| "loss": 1.332, |
| "step": 1193 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 1.1986577181208054e-05, |
| "loss": 1.3525, |
| "step": 1194 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1979865771812083e-05, |
| "loss": 1.373, |
| "step": 1195 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1973154362416108e-05, |
| "loss": 1.3516, |
| "step": 1196 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1966442953020135e-05, |
| "loss": 1.3496, |
| "step": 1197 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1959731543624163e-05, |
| "loss": 1.3057, |
| "step": 1198 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1953020134228188e-05, |
| "loss": 1.3682, |
| "step": 1199 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 1.1946308724832217e-05, |
| "loss": 1.3516, |
| "step": 1200 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.1939597315436242e-05, |
| "loss": 1.3223, |
| "step": 1201 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.193288590604027e-05, |
| "loss": 1.3408, |
| "step": 1202 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.1926174496644296e-05, |
| "loss": 1.374, |
| "step": 1203 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.1919463087248324e-05, |
| "loss": 1.3525, |
| "step": 1204 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.191275167785235e-05, |
| "loss": 1.3242, |
| "step": 1205 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 1.1906040268456376e-05, |
| "loss": 1.3574, |
| "step": 1206 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.1899328859060403e-05, |
| "loss": 1.3379, |
| "step": 1207 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.189261744966443e-05, |
| "loss": 1.3496, |
| "step": 1208 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.1885906040268458e-05, |
| "loss": 1.3838, |
| "step": 1209 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.1879194630872484e-05, |
| "loss": 1.3096, |
| "step": 1210 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.1872483221476512e-05, |
| "loss": 1.3418, |
| "step": 1211 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 1.1865771812080537e-05, |
| "loss": 1.3662, |
| "step": 1212 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.1859060402684566e-05, |
| "loss": 1.3701, |
| "step": 1213 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.1852348993288591e-05, |
| "loss": 1.3525, |
| "step": 1214 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.1845637583892618e-05, |
| "loss": 1.3486, |
| "step": 1215 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.1838926174496645e-05, |
| "loss": 1.3428, |
| "step": 1216 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.1832214765100672e-05, |
| "loss": 1.3379, |
| "step": 1217 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 1.18255033557047e-05, |
| "loss": 1.3682, |
| "step": 1218 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.1818791946308725e-05, |
| "loss": 1.3682, |
| "step": 1219 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.1812080536912754e-05, |
| "loss": 1.3662, |
| "step": 1220 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.1805369127516779e-05, |
| "loss": 1.3096, |
| "step": 1221 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.1798657718120808e-05, |
| "loss": 1.3467, |
| "step": 1222 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.1791946308724833e-05, |
| "loss": 1.3193, |
| "step": 1223 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 1.178523489932886e-05, |
| "loss": 1.3359, |
| "step": 1224 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.1778523489932886e-05, |
| "loss": 1.3613, |
| "step": 1225 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.1771812080536913e-05, |
| "loss": 1.3184, |
| "step": 1226 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.1765100671140942e-05, |
| "loss": 1.3545, |
| "step": 1227 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.1758389261744967e-05, |
| "loss": 1.3584, |
| "step": 1228 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.1751677852348996e-05, |
| "loss": 1.3633, |
| "step": 1229 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 1.174496644295302e-05, |
| "loss": 1.3428, |
| "step": 1230 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.173825503355705e-05, |
| "loss": 1.3242, |
| "step": 1231 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.1731543624161074e-05, |
| "loss": 1.3525, |
| "step": 1232 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.1724832214765101e-05, |
| "loss": 1.3477, |
| "step": 1233 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.1718120805369128e-05, |
| "loss": 1.3486, |
| "step": 1234 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.1711409395973155e-05, |
| "loss": 1.3477, |
| "step": 1235 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 1.1704697986577182e-05, |
| "loss": 1.3477, |
| "step": 1236 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1697986577181209e-05, |
| "loss": 1.3887, |
| "step": 1237 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1691275167785237e-05, |
| "loss": 1.3486, |
| "step": 1238 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1684563758389262e-05, |
| "loss": 1.3662, |
| "step": 1239 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1677852348993291e-05, |
| "loss": 1.3828, |
| "step": 1240 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1671140939597316e-05, |
| "loss": 1.3057, |
| "step": 1241 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 1.1664429530201343e-05, |
| "loss": 1.3877, |
| "step": 1242 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.165771812080537e-05, |
| "loss": 1.3594, |
| "step": 1243 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.1651006711409397e-05, |
| "loss": 1.3643, |
| "step": 1244 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.1644295302013424e-05, |
| "loss": 1.3438, |
| "step": 1245 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.163758389261745e-05, |
| "loss": 1.3789, |
| "step": 1246 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.1630872483221479e-05, |
| "loss": 1.333, |
| "step": 1247 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 1.1624161073825504e-05, |
| "loss": 1.3691, |
| "step": 1248 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1617449664429533e-05, |
| "loss": 1.3604, |
| "step": 1249 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1610738255033558e-05, |
| "loss": 1.3047, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1604026845637583e-05, |
| "loss": 1.373, |
| "step": 1251 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1597315436241611e-05, |
| "loss": 1.3613, |
| "step": 1252 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1590604026845638e-05, |
| "loss": 1.3223, |
| "step": 1253 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 1.1583892617449665e-05, |
| "loss": 1.3535, |
| "step": 1254 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.1577181208053692e-05, |
| "loss": 1.3672, |
| "step": 1255 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.1570469798657719e-05, |
| "loss": 1.335, |
| "step": 1256 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.1563758389261746e-05, |
| "loss": 1.3799, |
| "step": 1257 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.1557046979865774e-05, |
| "loss": 1.3711, |
| "step": 1258 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.15503355704698e-05, |
| "loss": 1.3584, |
| "step": 1259 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 1.1543624161073828e-05, |
| "loss": 1.3525, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.1536912751677853e-05, |
| "loss": 1.3564, |
| "step": 1261 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.153020134228188e-05, |
| "loss": 1.3525, |
| "step": 1262 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.1523489932885907e-05, |
| "loss": 1.3311, |
| "step": 1263 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.1516778523489934e-05, |
| "loss": 1.3535, |
| "step": 1264 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.151006711409396e-05, |
| "loss": 1.3262, |
| "step": 1265 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 1.1503355704697987e-05, |
| "loss": 1.3379, |
| "step": 1266 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.1496644295302016e-05, |
| "loss": 1.3496, |
| "step": 1267 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.1489932885906041e-05, |
| "loss": 1.3174, |
| "step": 1268 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.148322147651007e-05, |
| "loss": 1.3506, |
| "step": 1269 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.1476510067114095e-05, |
| "loss": 1.335, |
| "step": 1270 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.146979865771812e-05, |
| "loss": 1.3359, |
| "step": 1271 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 1.1463087248322149e-05, |
| "loss": 1.3291, |
| "step": 1272 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1456375838926175e-05, |
| "loss": 1.3262, |
| "step": 1273 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1449664429530202e-05, |
| "loss": 1.3613, |
| "step": 1274 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1442953020134229e-05, |
| "loss": 1.3477, |
| "step": 1275 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1436241610738256e-05, |
| "loss": 1.3311, |
| "step": 1276 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1429530201342283e-05, |
| "loss": 1.3262, |
| "step": 1277 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 1.1422818791946311e-05, |
| "loss": 1.3447, |
| "step": 1278 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.1416107382550337e-05, |
| "loss": 1.3652, |
| "step": 1279 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.1409395973154362e-05, |
| "loss": 1.3545, |
| "step": 1280 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.140268456375839e-05, |
| "loss": 1.3223, |
| "step": 1281 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.1395973154362417e-05, |
| "loss": 1.3398, |
| "step": 1282 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.1389261744966444e-05, |
| "loss": 1.3467, |
| "step": 1283 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 1.138255033557047e-05, |
| "loss": 1.376, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1375838926174498e-05, |
| "loss": 1.3252, |
| "step": 1285 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1369127516778524e-05, |
| "loss": 1.3496, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1362416107382553e-05, |
| "loss": 1.3594, |
| "step": 1287 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1355704697986578e-05, |
| "loss": 1.3223, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1348993288590603e-05, |
| "loss": 1.3438, |
| "step": 1289 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 1.1342281879194632e-05, |
| "loss": 1.3721, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1335570469798659e-05, |
| "loss": 1.3584, |
| "step": 1291 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1328859060402686e-05, |
| "loss": 1.3594, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1322147651006712e-05, |
| "loss": 1.3623, |
| "step": 1293 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.131543624161074e-05, |
| "loss": 1.3516, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1308724832214766e-05, |
| "loss": 1.3291, |
| "step": 1295 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 1.1302013422818795e-05, |
| "loss": 1.3271, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.129530201342282e-05, |
| "loss": 1.3672, |
| "step": 1297 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1288590604026845e-05, |
| "loss": 1.3672, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1281879194630874e-05, |
| "loss": 1.3389, |
| "step": 1299 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1275167785234899e-05, |
| "loss": 1.3574, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1268456375838927e-05, |
| "loss": 1.3516, |
| "step": 1301 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 1.1261744966442954e-05, |
| "loss": 1.335, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1255033557046981e-05, |
| "loss": 1.3545, |
| "step": 1303 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1248322147651008e-05, |
| "loss": 1.3223, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1241610738255035e-05, |
| "loss": 1.3447, |
| "step": 1305 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1234899328859062e-05, |
| "loss": 1.3447, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1228187919463087e-05, |
| "loss": 1.3359, |
| "step": 1307 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 1.1221476510067115e-05, |
| "loss": 1.3066, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.121476510067114e-05, |
| "loss": 1.3613, |
| "step": 1309 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.1208053691275169e-05, |
| "loss": 1.3682, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.1201342281879196e-05, |
| "loss": 1.3633, |
| "step": 1311 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.1194630872483223e-05, |
| "loss": 1.3652, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.118791946308725e-05, |
| "loss": 1.335, |
| "step": 1313 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 1.1181208053691276e-05, |
| "loss": 1.3311, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1174496644295303e-05, |
| "loss": 1.3379, |
| "step": 1315 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1167785234899328e-05, |
| "loss": 1.3457, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1161073825503357e-05, |
| "loss": 1.2842, |
| "step": 1317 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1154362416107382e-05, |
| "loss": 1.3496, |
| "step": 1318 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.114765100671141e-05, |
| "loss": 1.3066, |
| "step": 1319 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 1.1140939597315436e-05, |
| "loss": 1.3252, |
| "step": 1320 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1134228187919464e-05, |
| "loss": 1.3496, |
| "step": 1321 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1127516778523491e-05, |
| "loss": 1.3418, |
| "step": 1322 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1120805369127518e-05, |
| "loss": 1.3486, |
| "step": 1323 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1114093959731545e-05, |
| "loss": 1.375, |
| "step": 1324 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.110738255033557e-05, |
| "loss": 1.3184, |
| "step": 1325 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 1.1100671140939599e-05, |
| "loss": 1.2959, |
| "step": 1326 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1093959731543624e-05, |
| "loss": 1.3477, |
| "step": 1327 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1087248322147652e-05, |
| "loss": 1.3164, |
| "step": 1328 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1080536912751677e-05, |
| "loss": 1.3574, |
| "step": 1329 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1073825503355706e-05, |
| "loss": 1.3672, |
| "step": 1330 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.1067114093959733e-05, |
| "loss": 1.3486, |
| "step": 1331 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 1.106040268456376e-05, |
| "loss": 1.3379, |
| "step": 1332 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.1053691275167787e-05, |
| "loss": 1.3135, |
| "step": 1333 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.1046979865771812e-05, |
| "loss": 1.3643, |
| "step": 1334 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.104026845637584e-05, |
| "loss": 1.3262, |
| "step": 1335 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.1033557046979865e-05, |
| "loss": 1.3428, |
| "step": 1336 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.1026845637583894e-05, |
| "loss": 1.3154, |
| "step": 1337 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 1.102013422818792e-05, |
| "loss": 1.3232, |
| "step": 1338 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.1013422818791948e-05, |
| "loss": 1.3564, |
| "step": 1339 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.1006711409395975e-05, |
| "loss": 1.3809, |
| "step": 1340 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.1000000000000001e-05, |
| "loss": 1.3291, |
| "step": 1341 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.0993288590604028e-05, |
| "loss": 1.3164, |
| "step": 1342 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 1.0986577181208053e-05, |
| "loss": 1.3096, |
| "step": 1343 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.0979865771812082e-05, |
| "loss": 1.3594, |
| "step": 1344 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.0973154362416107e-05, |
| "loss": 1.3613, |
| "step": 1345 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.0966442953020136e-05, |
| "loss": 1.3203, |
| "step": 1346 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.095973154362416e-05, |
| "loss": 1.3691, |
| "step": 1347 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.095302013422819e-05, |
| "loss": 1.3242, |
| "step": 1348 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 1.0946308724832215e-05, |
| "loss": 1.3291, |
| "step": 1349 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.0939597315436243e-05, |
| "loss": 1.3574, |
| "step": 1350 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.093288590604027e-05, |
| "loss": 1.333, |
| "step": 1351 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.0926174496644297e-05, |
| "loss": 1.3389, |
| "step": 1352 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.0919463087248324e-05, |
| "loss": 1.3359, |
| "step": 1353 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.0912751677852349e-05, |
| "loss": 1.3838, |
| "step": 1354 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 1.0906040268456377e-05, |
| "loss": 1.3369, |
| "step": 1355 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0899328859060403e-05, |
| "loss": 1.3164, |
| "step": 1356 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0892617449664431e-05, |
| "loss": 1.3643, |
| "step": 1357 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0885906040268456e-05, |
| "loss": 1.3213, |
| "step": 1358 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0879194630872485e-05, |
| "loss": 1.3174, |
| "step": 1359 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0872483221476512e-05, |
| "loss": 1.3496, |
| "step": 1360 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 1.0865771812080538e-05, |
| "loss": 1.3232, |
| "step": 1361 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.0859060402684565e-05, |
| "loss": 1.3555, |
| "step": 1362 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.085234899328859e-05, |
| "loss": 1.3613, |
| "step": 1363 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.0845637583892619e-05, |
| "loss": 1.3486, |
| "step": 1364 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.0838926174496644e-05, |
| "loss": 1.3174, |
| "step": 1365 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.0832214765100673e-05, |
| "loss": 1.2939, |
| "step": 1366 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 1.0825503355704698e-05, |
| "loss": 1.2998, |
| "step": 1367 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.0818791946308726e-05, |
| "loss": 1.3516, |
| "step": 1368 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.0812080536912752e-05, |
| "loss": 1.3203, |
| "step": 1369 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.080536912751678e-05, |
| "loss": 1.3193, |
| "step": 1370 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.0798657718120807e-05, |
| "loss": 1.3438, |
| "step": 1371 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.0791946308724832e-05, |
| "loss": 1.3506, |
| "step": 1372 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 1.078523489932886e-05, |
| "loss": 1.3428, |
| "step": 1373 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.0778523489932886e-05, |
| "loss": 1.3496, |
| "step": 1374 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.0771812080536914e-05, |
| "loss": 1.3467, |
| "step": 1375 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.076510067114094e-05, |
| "loss": 1.3213, |
| "step": 1376 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.0758389261744968e-05, |
| "loss": 1.333, |
| "step": 1377 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.0751677852348993e-05, |
| "loss": 1.3398, |
| "step": 1378 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 1.0744966442953022e-05, |
| "loss": 1.3242, |
| "step": 1379 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0738255033557049e-05, |
| "loss": 1.3232, |
| "step": 1380 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0731543624161074e-05, |
| "loss": 1.3389, |
| "step": 1381 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0724832214765102e-05, |
| "loss": 1.3672, |
| "step": 1382 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0718120805369128e-05, |
| "loss": 1.3203, |
| "step": 1383 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0711409395973156e-05, |
| "loss": 1.3105, |
| "step": 1384 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 1.0704697986577181e-05, |
| "loss": 1.3008, |
| "step": 1385 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.069798657718121e-05, |
| "loss": 1.3369, |
| "step": 1386 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.0691275167785235e-05, |
| "loss": 1.3672, |
| "step": 1387 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.0684563758389264e-05, |
| "loss": 1.3223, |
| "step": 1388 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.0677852348993289e-05, |
| "loss": 1.3467, |
| "step": 1389 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.0671140939597316e-05, |
| "loss": 1.334, |
| "step": 1390 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 1.0664429530201344e-05, |
| "loss": 1.3018, |
| "step": 1391 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.065771812080537e-05, |
| "loss": 1.3438, |
| "step": 1392 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.0651006711409398e-05, |
| "loss": 1.3389, |
| "step": 1393 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.0644295302013423e-05, |
| "loss": 1.334, |
| "step": 1394 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.0637583892617451e-05, |
| "loss": 1.3154, |
| "step": 1395 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.0630872483221477e-05, |
| "loss": 1.3262, |
| "step": 1396 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 1.0624161073825505e-05, |
| "loss": 1.3525, |
| "step": 1397 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.061744966442953e-05, |
| "loss": 1.3467, |
| "step": 1398 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.0610738255033557e-05, |
| "loss": 1.3232, |
| "step": 1399 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.0604026845637586e-05, |
| "loss": 1.3184, |
| "step": 1400 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.0597315436241611e-05, |
| "loss": 1.3369, |
| "step": 1401 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.059060402684564e-05, |
| "loss": 1.3379, |
| "step": 1402 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 1.0583892617449665e-05, |
| "loss": 1.3301, |
| "step": 1403 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0577181208053693e-05, |
| "loss": 1.332, |
| "step": 1404 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0570469798657718e-05, |
| "loss": 1.3115, |
| "step": 1405 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0563758389261747e-05, |
| "loss": 1.3242, |
| "step": 1406 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0557046979865772e-05, |
| "loss": 1.3496, |
| "step": 1407 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0550335570469799e-05, |
| "loss": 1.3203, |
| "step": 1408 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 1.0543624161073827e-05, |
| "loss": 1.3555, |
| "step": 1409 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.0536912751677853e-05, |
| "loss": 1.3408, |
| "step": 1410 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.0530201342281881e-05, |
| "loss": 1.3135, |
| "step": 1411 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.0523489932885906e-05, |
| "loss": 1.3467, |
| "step": 1412 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.0516778523489935e-05, |
| "loss": 1.3184, |
| "step": 1413 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.051006711409396e-05, |
| "loss": 1.3281, |
| "step": 1414 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 1.0503355704697989e-05, |
| "loss": 1.334, |
| "step": 1415 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.0496644295302014e-05, |
| "loss": 1.332, |
| "step": 1416 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.048993288590604e-05, |
| "loss": 1.3281, |
| "step": 1417 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.0483221476510067e-05, |
| "loss": 1.3516, |
| "step": 1418 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.0476510067114094e-05, |
| "loss": 1.3291, |
| "step": 1419 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.0469798657718123e-05, |
| "loss": 1.3623, |
| "step": 1420 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 1.0463087248322148e-05, |
| "loss": 1.3379, |
| "step": 1421 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.0456375838926177e-05, |
| "loss": 1.3164, |
| "step": 1422 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.0449664429530202e-05, |
| "loss": 1.3691, |
| "step": 1423 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.044295302013423e-05, |
| "loss": 1.3223, |
| "step": 1424 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.0436241610738255e-05, |
| "loss": 1.3125, |
| "step": 1425 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.0429530201342282e-05, |
| "loss": 1.3262, |
| "step": 1426 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 1.0422818791946309e-05, |
| "loss": 1.3232, |
| "step": 1427 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.0416107382550336e-05, |
| "loss": 1.3252, |
| "step": 1428 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.0409395973154364e-05, |
| "loss": 1.3516, |
| "step": 1429 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.040268456375839e-05, |
| "loss": 1.3271, |
| "step": 1430 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.0395973154362418e-05, |
| "loss": 1.3438, |
| "step": 1431 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.0389261744966443e-05, |
| "loss": 1.3379, |
| "step": 1432 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 1.0382550335570472e-05, |
| "loss": 1.3369, |
| "step": 1433 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0375838926174497e-05, |
| "loss": 1.3311, |
| "step": 1434 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0369127516778524e-05, |
| "loss": 1.3652, |
| "step": 1435 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.036241610738255e-05, |
| "loss": 1.3125, |
| "step": 1436 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0355704697986578e-05, |
| "loss": 1.3281, |
| "step": 1437 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0348993288590604e-05, |
| "loss": 1.3623, |
| "step": 1438 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 1.0342281879194631e-05, |
| "loss": 1.3145, |
| "step": 1439 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.033557046979866e-05, |
| "loss": 1.3311, |
| "step": 1440 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0328859060402685e-05, |
| "loss": 1.3467, |
| "step": 1441 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0322147651006714e-05, |
| "loss": 1.3262, |
| "step": 1442 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0315436241610739e-05, |
| "loss": 1.3359, |
| "step": 1443 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0308724832214767e-05, |
| "loss": 1.3252, |
| "step": 1444 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 1.0302013422818792e-05, |
| "loss": 1.3408, |
| "step": 1445 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.029530201342282e-05, |
| "loss": 1.3301, |
| "step": 1446 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0288590604026846e-05, |
| "loss": 1.3281, |
| "step": 1447 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0281879194630873e-05, |
| "loss": 1.3525, |
| "step": 1448 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0275167785234902e-05, |
| "loss": 1.3213, |
| "step": 1449 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0268456375838927e-05, |
| "loss": 1.3662, |
| "step": 1450 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 1.0261744966442955e-05, |
| "loss": 1.3447, |
| "step": 1451 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.025503355704698e-05, |
| "loss": 1.3232, |
| "step": 1452 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0248322147651009e-05, |
| "loss": 1.3418, |
| "step": 1453 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0241610738255034e-05, |
| "loss": 1.3369, |
| "step": 1454 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0234899328859061e-05, |
| "loss": 1.3242, |
| "step": 1455 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0228187919463088e-05, |
| "loss": 1.3271, |
| "step": 1456 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 1.0221476510067115e-05, |
| "loss": 1.3174, |
| "step": 1457 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.0214765100671142e-05, |
| "loss": 1.3105, |
| "step": 1458 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.0208053691275168e-05, |
| "loss": 1.3281, |
| "step": 1459 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.0201342281879197e-05, |
| "loss": 1.333, |
| "step": 1460 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.0194630872483222e-05, |
| "loss": 1.3291, |
| "step": 1461 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.018791946308725e-05, |
| "loss": 1.3105, |
| "step": 1462 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 1.0181208053691276e-05, |
| "loss": 1.3193, |
| "step": 1463 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.0174496644295303e-05, |
| "loss": 1.3408, |
| "step": 1464 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.016778523489933e-05, |
| "loss": 1.3506, |
| "step": 1465 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.0161073825503356e-05, |
| "loss": 1.3242, |
| "step": 1466 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.0154362416107383e-05, |
| "loss": 1.3525, |
| "step": 1467 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.014765100671141e-05, |
| "loss": 1.3223, |
| "step": 1468 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 1.0140939597315439e-05, |
| "loss": 1.3096, |
| "step": 1469 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0134228187919464e-05, |
| "loss": 1.3125, |
| "step": 1470 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0127516778523492e-05, |
| "loss": 1.3281, |
| "step": 1471 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0120805369127517e-05, |
| "loss": 1.335, |
| "step": 1472 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0114093959731544e-05, |
| "loss": 1.3447, |
| "step": 1473 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0107382550335571e-05, |
| "loss": 1.3008, |
| "step": 1474 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 1.0100671140939598e-05, |
| "loss": 1.3135, |
| "step": 1475 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0093959731543625e-05, |
| "loss": 1.3447, |
| "step": 1476 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0087248322147652e-05, |
| "loss": 1.3379, |
| "step": 1477 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.008053691275168e-05, |
| "loss": 1.3623, |
| "step": 1478 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0073825503355705e-05, |
| "loss": 1.3389, |
| "step": 1479 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0067114093959734e-05, |
| "loss": 1.3311, |
| "step": 1480 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 1.0060402684563759e-05, |
| "loss": 1.293, |
| "step": 1481 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.0053691275167784e-05, |
| "loss": 1.3018, |
| "step": 1482 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.0046979865771813e-05, |
| "loss": 1.3252, |
| "step": 1483 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.004026845637584e-05, |
| "loss": 1.3447, |
| "step": 1484 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.0033557046979867e-05, |
| "loss": 1.334, |
| "step": 1485 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.0026845637583893e-05, |
| "loss": 1.3213, |
| "step": 1486 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 1.002013422818792e-05, |
| "loss": 1.3408, |
| "step": 1487 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.0013422818791947e-05, |
| "loss": 1.3408, |
| "step": 1488 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1.0006711409395976e-05, |
| "loss": 1.2969, |
| "step": 1489 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 1e-05, |
| "loss": 1.3242, |
| "step": 1490 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 9.993288590604028e-06, |
| "loss": 1.3643, |
| "step": 1491 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 9.986577181208055e-06, |
| "loss": 1.3369, |
| "step": 1492 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.979865771812081e-06, |
| "loss": 1.334, |
| "step": 1493 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.973154362416108e-06, |
| "loss": 1.3271, |
| "step": 1494 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.966442953020135e-06, |
| "loss": 1.2998, |
| "step": 1495 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.959731543624162e-06, |
| "loss": 1.3145, |
| "step": 1496 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.953020134228189e-06, |
| "loss": 1.3047, |
| "step": 1497 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 9.946308724832216e-06, |
| "loss": 1.3477, |
| "step": 1498 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.939597315436242e-06, |
| "loss": 1.3242, |
| "step": 1499 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.93288590604027e-06, |
| "loss": 1.3477, |
| "step": 1500 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.926174496644296e-06, |
| "loss": 1.2852, |
| "step": 1501 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.919463087248323e-06, |
| "loss": 1.3066, |
| "step": 1502 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.91275167785235e-06, |
| "loss": 1.3545, |
| "step": 1503 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 9.906040268456377e-06, |
| "loss": 1.3301, |
| "step": 1504 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.899328859060404e-06, |
| "loss": 1.3135, |
| "step": 1505 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.89261744966443e-06, |
| "loss": 1.3057, |
| "step": 1506 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.885906040268457e-06, |
| "loss": 1.3184, |
| "step": 1507 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.879194630872484e-06, |
| "loss": 1.3438, |
| "step": 1508 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.872483221476511e-06, |
| "loss": 1.3057, |
| "step": 1509 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 9.865771812080538e-06, |
| "loss": 1.3066, |
| "step": 1510 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.859060402684565e-06, |
| "loss": 1.3262, |
| "step": 1511 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.852348993288592e-06, |
| "loss": 1.3213, |
| "step": 1512 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.845637583892618e-06, |
| "loss": 1.3281, |
| "step": 1513 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.838926174496645e-06, |
| "loss": 1.3389, |
| "step": 1514 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.832214765100672e-06, |
| "loss": 1.2969, |
| "step": 1515 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 9.825503355704699e-06, |
| "loss": 1.3037, |
| "step": 1516 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.818791946308726e-06, |
| "loss": 1.3389, |
| "step": 1517 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.812080536912753e-06, |
| "loss": 1.3203, |
| "step": 1518 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.80536912751678e-06, |
| "loss": 1.3525, |
| "step": 1519 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.798657718120806e-06, |
| "loss": 1.3193, |
| "step": 1520 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.791946308724833e-06, |
| "loss": 1.3301, |
| "step": 1521 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 9.78523489932886e-06, |
| "loss": 1.3105, |
| "step": 1522 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.778523489932887e-06, |
| "loss": 1.3301, |
| "step": 1523 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.771812080536914e-06, |
| "loss": 1.3389, |
| "step": 1524 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.76510067114094e-06, |
| "loss": 1.3613, |
| "step": 1525 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.758389261744968e-06, |
| "loss": 1.3193, |
| "step": 1526 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.751677852348994e-06, |
| "loss": 1.3398, |
| "step": 1527 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 9.744966442953021e-06, |
| "loss": 1.3018, |
| "step": 1528 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.738255033557048e-06, |
| "loss": 1.333, |
| "step": 1529 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.731543624161075e-06, |
| "loss": 1.3496, |
| "step": 1530 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.724832214765102e-06, |
| "loss": 1.3438, |
| "step": 1531 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.718120805369129e-06, |
| "loss": 1.3477, |
| "step": 1532 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.711409395973155e-06, |
| "loss": 1.3398, |
| "step": 1533 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 9.704697986577182e-06, |
| "loss": 1.3018, |
| "step": 1534 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.69798657718121e-06, |
| "loss": 1.3379, |
| "step": 1535 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.691275167785236e-06, |
| "loss": 1.3164, |
| "step": 1536 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.684563758389263e-06, |
| "loss": 1.2959, |
| "step": 1537 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.67785234899329e-06, |
| "loss": 1.3389, |
| "step": 1538 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.671140939597317e-06, |
| "loss": 1.3545, |
| "step": 1539 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 9.664429530201343e-06, |
| "loss": 1.332, |
| "step": 1540 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.657718120805369e-06, |
| "loss": 1.3262, |
| "step": 1541 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.651006711409397e-06, |
| "loss": 1.3291, |
| "step": 1542 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.644295302013424e-06, |
| "loss": 1.3018, |
| "step": 1543 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.637583892617451e-06, |
| "loss": 1.3418, |
| "step": 1544 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.630872483221478e-06, |
| "loss": 1.3379, |
| "step": 1545 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 9.624161073825505e-06, |
| "loss": 1.3086, |
| "step": 1546 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.617449664429531e-06, |
| "loss": 1.3574, |
| "step": 1547 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.610738255033558e-06, |
| "loss": 1.2939, |
| "step": 1548 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.604026845637585e-06, |
| "loss": 1.3301, |
| "step": 1549 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.59731543624161e-06, |
| "loss": 1.3027, |
| "step": 1550 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.590604026845637e-06, |
| "loss": 1.3359, |
| "step": 1551 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 9.583892617449666e-06, |
| "loss": 1.3223, |
| "step": 1552 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.577181208053693e-06, |
| "loss": 1.332, |
| "step": 1553 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.57046979865772e-06, |
| "loss": 1.2822, |
| "step": 1554 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.563758389261746e-06, |
| "loss": 1.3213, |
| "step": 1555 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.557046979865773e-06, |
| "loss": 1.3301, |
| "step": 1556 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.5503355704698e-06, |
| "loss": 1.3467, |
| "step": 1557 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 9.543624161073827e-06, |
| "loss": 1.3125, |
| "step": 1558 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.536912751677852e-06, |
| "loss": 1.3047, |
| "step": 1559 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.530201342281879e-06, |
| "loss": 1.3203, |
| "step": 1560 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.523489932885906e-06, |
| "loss": 1.3086, |
| "step": 1561 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.516778523489934e-06, |
| "loss": 1.335, |
| "step": 1562 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.510067114093961e-06, |
| "loss": 1.335, |
| "step": 1563 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 9.503355704697988e-06, |
| "loss": 1.3184, |
| "step": 1564 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.496644295302015e-06, |
| "loss": 1.3428, |
| "step": 1565 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.489932885906042e-06, |
| "loss": 1.3213, |
| "step": 1566 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.483221476510069e-06, |
| "loss": 1.3027, |
| "step": 1567 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.476510067114095e-06, |
| "loss": 1.3281, |
| "step": 1568 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.46979865771812e-06, |
| "loss": 1.3135, |
| "step": 1569 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 9.463087248322147e-06, |
| "loss": 1.3066, |
| "step": 1570 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.456375838926174e-06, |
| "loss": 1.3145, |
| "step": 1571 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.449664429530203e-06, |
| "loss": 1.3438, |
| "step": 1572 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.44295302013423e-06, |
| "loss": 1.3213, |
| "step": 1573 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.436241610738256e-06, |
| "loss": 1.2949, |
| "step": 1574 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.429530201342283e-06, |
| "loss": 1.2881, |
| "step": 1575 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 9.42281879194631e-06, |
| "loss": 1.3457, |
| "step": 1576 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.416107382550337e-06, |
| "loss": 1.3164, |
| "step": 1577 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.409395973154362e-06, |
| "loss": 1.334, |
| "step": 1578 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.402684563758389e-06, |
| "loss": 1.3574, |
| "step": 1579 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.395973154362416e-06, |
| "loss": 1.3311, |
| "step": 1580 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.389261744966444e-06, |
| "loss": 1.3369, |
| "step": 1581 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 9.382550335570471e-06, |
| "loss": 1.3447, |
| "step": 1582 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.375838926174498e-06, |
| "loss": 1.3486, |
| "step": 1583 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.369127516778525e-06, |
| "loss": 1.3213, |
| "step": 1584 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.362416107382552e-06, |
| "loss": 1.2988, |
| "step": 1585 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.355704697986579e-06, |
| "loss": 1.3418, |
| "step": 1586 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.348993288590604e-06, |
| "loss": 1.3232, |
| "step": 1587 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 9.34228187919463e-06, |
| "loss": 1.3037, |
| "step": 1588 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.335570469798658e-06, |
| "loss": 1.3135, |
| "step": 1589 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.328859060402684e-06, |
| "loss": 1.3438, |
| "step": 1590 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.322147651006713e-06, |
| "loss": 1.3164, |
| "step": 1591 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.31543624161074e-06, |
| "loss": 1.2754, |
| "step": 1592 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.308724832214767e-06, |
| "loss": 1.29, |
| "step": 1593 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 9.302013422818794e-06, |
| "loss": 1.3232, |
| "step": 1594 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.29530201342282e-06, |
| "loss": 1.3213, |
| "step": 1595 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.288590604026846e-06, |
| "loss": 1.3506, |
| "step": 1596 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.281879194630872e-06, |
| "loss": 1.3379, |
| "step": 1597 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.2751677852349e-06, |
| "loss": 1.3096, |
| "step": 1598 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.268456375838926e-06, |
| "loss": 1.3389, |
| "step": 1599 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 9.261744966442953e-06, |
| "loss": 1.3076, |
| "step": 1600 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.255033557046982e-06, |
| "loss": 1.3418, |
| "step": 1601 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.248322147651008e-06, |
| "loss": 1.2998, |
| "step": 1602 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.241610738255035e-06, |
| "loss": 1.3291, |
| "step": 1603 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.234899328859062e-06, |
| "loss": 1.3359, |
| "step": 1604 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.228187919463087e-06, |
| "loss": 1.2939, |
| "step": 1605 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 9.221476510067114e-06, |
| "loss": 1.3369, |
| "step": 1606 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.214765100671141e-06, |
| "loss": 1.335, |
| "step": 1607 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.208053691275168e-06, |
| "loss": 1.3262, |
| "step": 1608 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.201342281879195e-06, |
| "loss": 1.3047, |
| "step": 1609 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.194630872483221e-06, |
| "loss": 1.3164, |
| "step": 1610 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.18791946308725e-06, |
| "loss": 1.335, |
| "step": 1611 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 9.181208053691277e-06, |
| "loss": 1.3018, |
| "step": 1612 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.174496644295304e-06, |
| "loss": 1.3125, |
| "step": 1613 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.16778523489933e-06, |
| "loss": 1.3525, |
| "step": 1614 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.161073825503356e-06, |
| "loss": 1.3506, |
| "step": 1615 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.154362416107383e-06, |
| "loss": 1.3135, |
| "step": 1616 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.14765100671141e-06, |
| "loss": 1.293, |
| "step": 1617 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 9.140939597315436e-06, |
| "loss": 1.3076, |
| "step": 1618 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.134228187919463e-06, |
| "loss": 1.3164, |
| "step": 1619 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.12751677852349e-06, |
| "loss": 1.3057, |
| "step": 1620 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.120805369127519e-06, |
| "loss": 1.3154, |
| "step": 1621 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.114093959731545e-06, |
| "loss": 1.2988, |
| "step": 1622 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.107382550335572e-06, |
| "loss": 1.3066, |
| "step": 1623 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 9.100671140939597e-06, |
| "loss": 1.2783, |
| "step": 1624 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.093959731543624e-06, |
| "loss": 1.291, |
| "step": 1625 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.087248322147651e-06, |
| "loss": 1.3154, |
| "step": 1626 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.080536912751678e-06, |
| "loss": 1.2949, |
| "step": 1627 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.073825503355705e-06, |
| "loss": 1.3027, |
| "step": 1628 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.067114093959732e-06, |
| "loss": 1.3115, |
| "step": 1629 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 9.060402684563759e-06, |
| "loss": 1.2803, |
| "step": 1630 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.053691275167787e-06, |
| "loss": 1.3262, |
| "step": 1631 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.046979865771814e-06, |
| "loss": 1.3174, |
| "step": 1632 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.040268456375839e-06, |
| "loss": 1.3125, |
| "step": 1633 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.033557046979866e-06, |
| "loss": 1.2861, |
| "step": 1634 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.026845637583893e-06, |
| "loss": 1.3037, |
| "step": 1635 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 9.02013422818792e-06, |
| "loss": 1.3018, |
| "step": 1636 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 9.013422818791947e-06, |
| "loss": 1.3281, |
| "step": 1637 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 9.006711409395973e-06, |
| "loss": 1.3408, |
| "step": 1638 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 9e-06, |
| "loss": 1.3105, |
| "step": 1639 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.993288590604027e-06, |
| "loss": 1.3135, |
| "step": 1640 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 8.986577181208056e-06, |
| "loss": 1.3252, |
| "step": 1641 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.97986577181208e-06, |
| "loss": 1.3428, |
| "step": 1642 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.973154362416108e-06, |
| "loss": 1.3145, |
| "step": 1643 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.966442953020134e-06, |
| "loss": 1.3203, |
| "step": 1644 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.959731543624161e-06, |
| "loss": 1.3311, |
| "step": 1645 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.953020134228188e-06, |
| "loss": 1.3232, |
| "step": 1646 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 8.946308724832215e-06, |
| "loss": 1.3066, |
| "step": 1647 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.939597315436242e-06, |
| "loss": 1.3164, |
| "step": 1648 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.932885906040269e-06, |
| "loss": 1.3428, |
| "step": 1649 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.926174496644297e-06, |
| "loss": 1.3105, |
| "step": 1650 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.919463087248322e-06, |
| "loss": 1.3018, |
| "step": 1651 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.91275167785235e-06, |
| "loss": 1.2891, |
| "step": 1652 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 8.906040268456376e-06, |
| "loss": 1.332, |
| "step": 1653 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.899328859060403e-06, |
| "loss": 1.3066, |
| "step": 1654 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.89261744966443e-06, |
| "loss": 1.3428, |
| "step": 1655 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.885906040268457e-06, |
| "loss": 1.3438, |
| "step": 1656 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.879194630872484e-06, |
| "loss": 1.3115, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.87248322147651e-06, |
| "loss": 1.3232, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 8.865771812080537e-06, |
| "loss": 1.335, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.859060402684566e-06, |
| "loss": 1.3154, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.852348993288591e-06, |
| "loss": 1.3125, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.845637583892618e-06, |
| "loss": 1.3008, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.838926174496645e-06, |
| "loss": 1.3564, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.832214765100672e-06, |
| "loss": 1.3428, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 8.825503355704698e-06, |
| "loss": 1.3096, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.818791946308725e-06, |
| "loss": 1.3184, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.812080536912752e-06, |
| "loss": 1.29, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.805369127516779e-06, |
| "loss": 1.3369, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.798657718120806e-06, |
| "loss": 1.3271, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.791946308724833e-06, |
| "loss": 1.3311, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 8.78523489932886e-06, |
| "loss": 1.335, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.778523489932886e-06, |
| "loss": 1.3418, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.771812080536913e-06, |
| "loss": 1.3057, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.76510067114094e-06, |
| "loss": 1.3262, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.758389261744967e-06, |
| "loss": 1.3438, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.751677852348994e-06, |
| "loss": 1.2744, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 8.74496644295302e-06, |
| "loss": 1.3418, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.738255033557047e-06, |
| "loss": 1.2988, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.731543624161074e-06, |
| "loss": 1.3154, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.724832214765101e-06, |
| "loss": 1.3008, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.718120805369128e-06, |
| "loss": 1.3311, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.711409395973155e-06, |
| "loss": 1.2881, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 8.704697986577182e-06, |
| "loss": 1.3311, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.697986577181209e-06, |
| "loss": 1.3379, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.691275167785235e-06, |
| "loss": 1.3389, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.684563758389262e-06, |
| "loss": 1.3174, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.67785234899329e-06, |
| "loss": 1.3311, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.671140939597316e-06, |
| "loss": 1.3037, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 8.664429530201343e-06, |
| "loss": 1.3418, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.65771812080537e-06, |
| "loss": 1.3262, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.651006711409397e-06, |
| "loss": 1.3193, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.644295302013423e-06, |
| "loss": 1.3369, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.63758389261745e-06, |
| "loss": 1.3105, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.630872483221477e-06, |
| "loss": 1.3174, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 8.624161073825504e-06, |
| "loss": 1.3125, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.617449664429531e-06, |
| "loss": 1.3008, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.610738255033558e-06, |
| "loss": 1.3125, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.604026845637585e-06, |
| "loss": 1.3125, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.597315436241611e-06, |
| "loss": 1.3506, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.590604026845638e-06, |
| "loss": 1.3232, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 8.583892617449665e-06, |
| "loss": 1.3359, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.577181208053692e-06, |
| "loss": 1.3164, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.570469798657719e-06, |
| "loss": 1.3174, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.563758389261746e-06, |
| "loss": 1.2852, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.557046979865773e-06, |
| "loss": 1.3105, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.5503355704698e-06, |
| "loss": 1.3096, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 8.543624161073826e-06, |
| "loss": 1.29, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.536912751677853e-06, |
| "loss": 1.3135, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.53020134228188e-06, |
| "loss": 1.3135, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.523489932885907e-06, |
| "loss": 1.3096, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.516778523489934e-06, |
| "loss": 1.3145, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.51006711409396e-06, |
| "loss": 1.3027, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 8.503355704697987e-06, |
| "loss": 1.335, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.496644295302014e-06, |
| "loss": 1.3027, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.489932885906041e-06, |
| "loss": 1.3184, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.483221476510068e-06, |
| "loss": 1.3066, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.476510067114095e-06, |
| "loss": 1.3057, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.469798657718122e-06, |
| "loss": 1.292, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 8.463087248322148e-06, |
| "loss": 1.3271, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.456375838926175e-06, |
| "loss": 1.3311, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.449664429530202e-06, |
| "loss": 1.3174, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.442953020134229e-06, |
| "loss": 1.2891, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.436241610738256e-06, |
| "loss": 1.2891, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.429530201342283e-06, |
| "loss": 1.3184, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 8.42281879194631e-06, |
| "loss": 1.3115, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.416107382550336e-06, |
| "loss": 1.2891, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.409395973154363e-06, |
| "loss": 1.3242, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.40268456375839e-06, |
| "loss": 1.3018, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.395973154362417e-06, |
| "loss": 1.3125, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.389261744966444e-06, |
| "loss": 1.2998, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 8.38255033557047e-06, |
| "loss": 1.3135, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.375838926174498e-06, |
| "loss": 1.3096, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.369127516778524e-06, |
| "loss": 1.3164, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.362416107382551e-06, |
| "loss": 1.3369, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.355704697986578e-06, |
| "loss": 1.3203, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.348993288590605e-06, |
| "loss": 1.3311, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 8.342281879194632e-06, |
| "loss": 1.3193, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.335570469798659e-06, |
| "loss": 1.3105, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.328859060402686e-06, |
| "loss": 1.2998, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.322147651006712e-06, |
| "loss": 1.3398, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.31543624161074e-06, |
| "loss": 1.3477, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.308724832214766e-06, |
| "loss": 1.3291, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 8.302013422818791e-06, |
| "loss": 1.3232, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.29530201342282e-06, |
| "loss": 1.3018, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.288590604026847e-06, |
| "loss": 1.3281, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.281879194630874e-06, |
| "loss": 1.3066, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.2751677852349e-06, |
| "loss": 1.3105, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.268456375838927e-06, |
| "loss": 1.3242, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 8.261744966442954e-06, |
| "loss": 1.2891, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.255033557046981e-06, |
| "loss": 1.2881, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.248322147651008e-06, |
| "loss": 1.332, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.241610738255035e-06, |
| "loss": 1.2979, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.23489932885906e-06, |
| "loss": 1.2871, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.228187919463088e-06, |
| "loss": 1.2979, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 8.221476510067115e-06, |
| "loss": 1.3027, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.214765100671142e-06, |
| "loss": 1.335, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.208053691275169e-06, |
| "loss": 1.3115, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.201342281879196e-06, |
| "loss": 1.3291, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.194630872483223e-06, |
| "loss": 1.3564, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.18791946308725e-06, |
| "loss": 1.3281, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 8.181208053691276e-06, |
| "loss": 1.3311, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.174496644295301e-06, |
| "loss": 1.2949, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.16778523489933e-06, |
| "loss": 1.3164, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.161073825503357e-06, |
| "loss": 1.3184, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.154362416107384e-06, |
| "loss": 1.3076, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.14765100671141e-06, |
| "loss": 1.3223, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 8.140939597315437e-06, |
| "loss": 1.2754, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.134228187919464e-06, |
| "loss": 1.3623, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.127516778523491e-06, |
| "loss": 1.3174, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.120805369127518e-06, |
| "loss": 1.292, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.114093959731543e-06, |
| "loss": 1.333, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.10738255033557e-06, |
| "loss": 1.2939, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 8.100671140939599e-06, |
| "loss": 1.3301, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.093959731543625e-06, |
| "loss": 1.3135, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.087248322147652e-06, |
| "loss": 1.3057, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.080536912751679e-06, |
| "loss": 1.335, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.073825503355706e-06, |
| "loss": 1.335, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.067114093959733e-06, |
| "loss": 1.3184, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 8.06040268456376e-06, |
| "loss": 1.3154, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.053691275167785e-06, |
| "loss": 1.3037, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.046979865771812e-06, |
| "loss": 1.3232, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.040268456375839e-06, |
| "loss": 1.2988, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.033557046979867e-06, |
| "loss": 1.3105, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.026845637583894e-06, |
| "loss": 1.3037, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 8.02013422818792e-06, |
| "loss": 1.3105, |
| "step": 1785 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 8.013422818791948e-06, |
| "loss": 1.291, |
| "step": 1786 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 8.006711409395974e-06, |
| "loss": 1.2998, |
| "step": 1787 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 8.000000000000001e-06, |
| "loss": 1.2754, |
| "step": 1788 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.993288590604026e-06, |
| "loss": 1.3086, |
| "step": 1789 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 7.986577181208053e-06, |
| "loss": 1.3135, |
| "step": 1790 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.97986577181208e-06, |
| "loss": 1.3076, |
| "step": 1791 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.973154362416107e-06, |
| "loss": 1.2832, |
| "step": 1792 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.966442953020136e-06, |
| "loss": 1.3223, |
| "step": 1793 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.959731543624162e-06, |
| "loss": 1.3066, |
| "step": 1794 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.95302013422819e-06, |
| "loss": 1.2773, |
| "step": 1795 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 7.946308724832216e-06, |
| "loss": 1.2832, |
| "step": 1796 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.939597315436243e-06, |
| "loss": 1.3154, |
| "step": 1797 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.93288590604027e-06, |
| "loss": 1.2959, |
| "step": 1798 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.926174496644295e-06, |
| "loss": 1.3252, |
| "step": 1799 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.919463087248322e-06, |
| "loss": 1.3262, |
| "step": 1800 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.912751677852349e-06, |
| "loss": 1.3115, |
| "step": 1801 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 7.906040268456376e-06, |
| "loss": 1.2842, |
| "step": 1802 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.899328859060404e-06, |
| "loss": 1.3027, |
| "step": 1803 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.892617449664431e-06, |
| "loss": 1.3135, |
| "step": 1804 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.885906040268458e-06, |
| "loss": 1.2832, |
| "step": 1805 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.879194630872485e-06, |
| "loss": 1.292, |
| "step": 1806 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.872483221476512e-06, |
| "loss": 1.3076, |
| "step": 1807 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 7.865771812080537e-06, |
| "loss": 1.3027, |
| "step": 1808 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.859060402684564e-06, |
| "loss": 1.3047, |
| "step": 1809 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.85234899328859e-06, |
| "loss": 1.2832, |
| "step": 1810 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.845637583892617e-06, |
| "loss": 1.3057, |
| "step": 1811 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.838926174496644e-06, |
| "loss": 1.2871, |
| "step": 1812 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.832214765100673e-06, |
| "loss": 1.293, |
| "step": 1813 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 7.8255033557047e-06, |
| "loss": 1.3262, |
| "step": 1814 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.818791946308726e-06, |
| "loss": 1.3125, |
| "step": 1815 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.812080536912753e-06, |
| "loss": 1.3232, |
| "step": 1816 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.805369127516778e-06, |
| "loss": 1.3174, |
| "step": 1817 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.798657718120805e-06, |
| "loss": 1.2725, |
| "step": 1818 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.791946308724832e-06, |
| "loss": 1.2852, |
| "step": 1819 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 7.785234899328859e-06, |
| "loss": 1.3057, |
| "step": 1820 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.778523489932886e-06, |
| "loss": 1.2744, |
| "step": 1821 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.771812080536914e-06, |
| "loss": 1.3076, |
| "step": 1822 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.765100671140941e-06, |
| "loss": 1.3135, |
| "step": 1823 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.758389261744968e-06, |
| "loss": 1.2734, |
| "step": 1824 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.751677852348995e-06, |
| "loss": 1.2832, |
| "step": 1825 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 7.74496644295302e-06, |
| "loss": 1.3125, |
| "step": 1826 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.738255033557047e-06, |
| "loss": 1.2959, |
| "step": 1827 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.731543624161074e-06, |
| "loss": 1.2871, |
| "step": 1828 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.7248322147651e-06, |
| "loss": 1.2832, |
| "step": 1829 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.718120805369127e-06, |
| "loss": 1.2666, |
| "step": 1830 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.711409395973154e-06, |
| "loss": 1.2852, |
| "step": 1831 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 7.704697986577183e-06, |
| "loss": 1.2998, |
| "step": 1832 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.69798657718121e-06, |
| "loss": 1.2891, |
| "step": 1833 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.691275167785237e-06, |
| "loss": 1.3203, |
| "step": 1834 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.684563758389262e-06, |
| "loss": 1.292, |
| "step": 1835 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.677852348993289e-06, |
| "loss": 1.3076, |
| "step": 1836 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.671140939597315e-06, |
| "loss": 1.3008, |
| "step": 1837 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 7.664429530201342e-06, |
| "loss": 1.3164, |
| "step": 1838 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.657718120805369e-06, |
| "loss": 1.2822, |
| "step": 1839 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.651006711409396e-06, |
| "loss": 1.3047, |
| "step": 1840 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.644295302013423e-06, |
| "loss": 1.2842, |
| "step": 1841 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.637583892617451e-06, |
| "loss": 1.3174, |
| "step": 1842 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.630872483221478e-06, |
| "loss": 1.2793, |
| "step": 1843 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 7.624161073825504e-06, |
| "loss": 1.2832, |
| "step": 1844 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.61744966442953e-06, |
| "loss": 1.3018, |
| "step": 1845 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.610738255033557e-06, |
| "loss": 1.3174, |
| "step": 1846 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.604026845637584e-06, |
| "loss": 1.3262, |
| "step": 1847 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.597315436241612e-06, |
| "loss": 1.3086, |
| "step": 1848 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.5906040268456385e-06, |
| "loss": 1.3164, |
| "step": 1849 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 7.583892617449665e-06, |
| "loss": 1.3281, |
| "step": 1850 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.577181208053692e-06, |
| "loss": 1.2959, |
| "step": 1851 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.570469798657719e-06, |
| "loss": 1.2578, |
| "step": 1852 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.563758389261746e-06, |
| "loss": 1.3213, |
| "step": 1853 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.557046979865772e-06, |
| "loss": 1.2764, |
| "step": 1854 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.550335570469799e-06, |
| "loss": 1.3076, |
| "step": 1855 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 7.543624161073826e-06, |
| "loss": 1.3301, |
| "step": 1856 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.5369127516778525e-06, |
| "loss": 1.3213, |
| "step": 1857 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.53020134228188e-06, |
| "loss": 1.3096, |
| "step": 1858 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.523489932885907e-06, |
| "loss": 1.293, |
| "step": 1859 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.516778523489934e-06, |
| "loss": 1.3037, |
| "step": 1860 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.510067114093961e-06, |
| "loss": 1.3047, |
| "step": 1861 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 7.503355704697988e-06, |
| "loss": 1.3027, |
| "step": 1862 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.496644295302014e-06, |
| "loss": 1.3027, |
| "step": 1863 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.4899328859060405e-06, |
| "loss": 1.3086, |
| "step": 1864 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.483221476510067e-06, |
| "loss": 1.2588, |
| "step": 1865 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.476510067114094e-06, |
| "loss": 1.2783, |
| "step": 1866 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.469798657718121e-06, |
| "loss": 1.2939, |
| "step": 1867 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 7.463087248322149e-06, |
| "loss": 1.292, |
| "step": 1868 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.456375838926176e-06, |
| "loss": 1.2705, |
| "step": 1869 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.4496644295302024e-06, |
| "loss": 1.3076, |
| "step": 1870 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.442953020134229e-06, |
| "loss": 1.3232, |
| "step": 1871 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.436241610738255e-06, |
| "loss": 1.3086, |
| "step": 1872 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.429530201342282e-06, |
| "loss": 1.3047, |
| "step": 1873 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 7.422818791946309e-06, |
| "loss": 1.3086, |
| "step": 1874 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.416107382550336e-06, |
| "loss": 1.2656, |
| "step": 1875 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.409395973154363e-06, |
| "loss": 1.3154, |
| "step": 1876 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.4026845637583896e-06, |
| "loss": 1.3115, |
| "step": 1877 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.395973154362417e-06, |
| "loss": 1.2949, |
| "step": 1878 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.389261744966444e-06, |
| "loss": 1.2969, |
| "step": 1879 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 7.382550335570471e-06, |
| "loss": 1.2979, |
| "step": 1880 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.375838926174497e-06, |
| "loss": 1.3184, |
| "step": 1881 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.369127516778524e-06, |
| "loss": 1.2754, |
| "step": 1882 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.362416107382551e-06, |
| "loss": 1.3125, |
| "step": 1883 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.3557046979865775e-06, |
| "loss": 1.3057, |
| "step": 1884 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.348993288590604e-06, |
| "loss": 1.3311, |
| "step": 1885 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 7.342281879194631e-06, |
| "loss": 1.3193, |
| "step": 1886 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.335570469798658e-06, |
| "loss": 1.3047, |
| "step": 1887 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.328859060402686e-06, |
| "loss": 1.3105, |
| "step": 1888 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.322147651006713e-06, |
| "loss": 1.3037, |
| "step": 1889 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.3154362416107395e-06, |
| "loss": 1.3193, |
| "step": 1890 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.3087248322147655e-06, |
| "loss": 1.3057, |
| "step": 1891 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 7.302013422818792e-06, |
| "loss": 1.3184, |
| "step": 1892 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.295302013422819e-06, |
| "loss": 1.3076, |
| "step": 1893 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.288590604026846e-06, |
| "loss": 1.2959, |
| "step": 1894 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.281879194630873e-06, |
| "loss": 1.2734, |
| "step": 1895 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.2751677852349e-06, |
| "loss": 1.2695, |
| "step": 1896 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.2684563758389275e-06, |
| "loss": 1.2637, |
| "step": 1897 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 7.261744966442954e-06, |
| "loss": 1.2988, |
| "step": 1898 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.255033557046981e-06, |
| "loss": 1.3311, |
| "step": 1899 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.248322147651007e-06, |
| "loss": 1.2949, |
| "step": 1900 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.241610738255034e-06, |
| "loss": 1.3223, |
| "step": 1901 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.234899328859061e-06, |
| "loss": 1.2754, |
| "step": 1902 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.228187919463088e-06, |
| "loss": 1.2881, |
| "step": 1903 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 7.221476510067115e-06, |
| "loss": 1.3057, |
| "step": 1904 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.2147651006711414e-06, |
| "loss": 1.2881, |
| "step": 1905 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.208053691275168e-06, |
| "loss": 1.2881, |
| "step": 1906 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.201342281879196e-06, |
| "loss": 1.3301, |
| "step": 1907 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.194630872483223e-06, |
| "loss": 1.293, |
| "step": 1908 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.187919463087248e-06, |
| "loss": 1.3076, |
| "step": 1909 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 7.181208053691276e-06, |
| "loss": 1.2773, |
| "step": 1910 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.1744966442953026e-06, |
| "loss": 1.3066, |
| "step": 1911 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.167785234899329e-06, |
| "loss": 1.2842, |
| "step": 1912 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.161073825503356e-06, |
| "loss": 1.3135, |
| "step": 1913 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.154362416107383e-06, |
| "loss": 1.3242, |
| "step": 1914 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.14765100671141e-06, |
| "loss": 1.2871, |
| "step": 1915 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 7.140939597315437e-06, |
| "loss": 1.2988, |
| "step": 1916 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.1342281879194645e-06, |
| "loss": 1.2881, |
| "step": 1917 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.12751677852349e-06, |
| "loss": 1.3018, |
| "step": 1918 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.120805369127517e-06, |
| "loss": 1.2744, |
| "step": 1919 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.114093959731544e-06, |
| "loss": 1.3018, |
| "step": 1920 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.107382550335571e-06, |
| "loss": 1.2773, |
| "step": 1921 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 7.100671140939598e-06, |
| "loss": 1.2812, |
| "step": 1922 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.093959731543625e-06, |
| "loss": 1.3281, |
| "step": 1923 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.087248322147652e-06, |
| "loss": 1.3018, |
| "step": 1924 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.0805369127516785e-06, |
| "loss": 1.2695, |
| "step": 1925 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.073825503355705e-06, |
| "loss": 1.2656, |
| "step": 1926 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.067114093959731e-06, |
| "loss": 1.2568, |
| "step": 1927 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 7.060402684563758e-06, |
| "loss": 1.291, |
| "step": 1928 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.053691275167786e-06, |
| "loss": 1.3057, |
| "step": 1929 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.046979865771813e-06, |
| "loss": 1.2871, |
| "step": 1930 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.04026845637584e-06, |
| "loss": 1.2832, |
| "step": 1931 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.0335570469798665e-06, |
| "loss": 1.293, |
| "step": 1932 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.026845637583893e-06, |
| "loss": 1.2861, |
| "step": 1933 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 7.02013422818792e-06, |
| "loss": 1.2637, |
| "step": 1934 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 7.013422818791947e-06, |
| "loss": 1.2598, |
| "step": 1935 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 7.006711409395974e-06, |
| "loss": 1.2617, |
| "step": 1936 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 7e-06, |
| "loss": 1.3174, |
| "step": 1937 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 6.993288590604027e-06, |
| "loss": 1.3086, |
| "step": 1938 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 6.9865771812080544e-06, |
| "loss": 1.3193, |
| "step": 1939 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.979865771812081e-06, |
| "loss": 1.2686, |
| "step": 1940 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.973154362416108e-06, |
| "loss": 1.292, |
| "step": 1941 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.966442953020135e-06, |
| "loss": 1.2949, |
| "step": 1942 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.959731543624162e-06, |
| "loss": 1.2783, |
| "step": 1943 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.953020134228189e-06, |
| "loss": 1.2793, |
| "step": 1944 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 6.9463087248322156e-06, |
| "loss": 1.2773, |
| "step": 1945 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.9395973154362416e-06, |
| "loss": 1.3154, |
| "step": 1946 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.9328859060402684e-06, |
| "loss": 1.2744, |
| "step": 1947 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.926174496644295e-06, |
| "loss": 1.335, |
| "step": 1948 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.919463087248323e-06, |
| "loss": 1.3164, |
| "step": 1949 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.91275167785235e-06, |
| "loss": 1.3281, |
| "step": 1950 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 6.906040268456377e-06, |
| "loss": 1.2939, |
| "step": 1951 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.8993288590604035e-06, |
| "loss": 1.2881, |
| "step": 1952 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.89261744966443e-06, |
| "loss": 1.2598, |
| "step": 1953 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.885906040268457e-06, |
| "loss": 1.2793, |
| "step": 1954 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.879194630872483e-06, |
| "loss": 1.3223, |
| "step": 1955 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.87248322147651e-06, |
| "loss": 1.2959, |
| "step": 1956 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 6.865771812080537e-06, |
| "loss": 1.29, |
| "step": 1957 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.859060402684564e-06, |
| "loss": 1.3262, |
| "step": 1958 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.8523489932885915e-06, |
| "loss": 1.2812, |
| "step": 1959 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.845637583892618e-06, |
| "loss": 1.2812, |
| "step": 1960 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.838926174496645e-06, |
| "loss": 1.2881, |
| "step": 1961 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.832214765100672e-06, |
| "loss": 1.2852, |
| "step": 1962 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 6.825503355704699e-06, |
| "loss": 1.3105, |
| "step": 1963 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.818791946308725e-06, |
| "loss": 1.2627, |
| "step": 1964 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.812080536912752e-06, |
| "loss": 1.2861, |
| "step": 1965 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.805369127516779e-06, |
| "loss": 1.3193, |
| "step": 1966 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.7986577181208055e-06, |
| "loss": 1.29, |
| "step": 1967 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.791946308724832e-06, |
| "loss": 1.2783, |
| "step": 1968 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 6.78523489932886e-06, |
| "loss": 1.2832, |
| "step": 1969 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.778523489932887e-06, |
| "loss": 1.3184, |
| "step": 1970 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.771812080536914e-06, |
| "loss": 1.3281, |
| "step": 1971 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.765100671140941e-06, |
| "loss": 1.2686, |
| "step": 1972 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.758389261744967e-06, |
| "loss": 1.3125, |
| "step": 1973 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.7516778523489935e-06, |
| "loss": 1.2959, |
| "step": 1974 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 6.74496644295302e-06, |
| "loss": 1.3262, |
| "step": 1975 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.738255033557047e-06, |
| "loss": 1.3027, |
| "step": 1976 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.731543624161074e-06, |
| "loss": 1.2676, |
| "step": 1977 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.724832214765101e-06, |
| "loss": 1.2441, |
| "step": 1978 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.7181208053691286e-06, |
| "loss": 1.2988, |
| "step": 1979 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.711409395973155e-06, |
| "loss": 1.3027, |
| "step": 1980 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 6.704697986577182e-06, |
| "loss": 1.2793, |
| "step": 1981 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.697986577181209e-06, |
| "loss": 1.2646, |
| "step": 1982 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.691275167785235e-06, |
| "loss": 1.291, |
| "step": 1983 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.684563758389262e-06, |
| "loss": 1.2773, |
| "step": 1984 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.677852348993289e-06, |
| "loss": 1.3057, |
| "step": 1985 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.671140939597316e-06, |
| "loss": 1.2734, |
| "step": 1986 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 6.6644295302013425e-06, |
| "loss": 1.2822, |
| "step": 1987 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.65771812080537e-06, |
| "loss": 1.3027, |
| "step": 1988 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.651006711409397e-06, |
| "loss": 1.2891, |
| "step": 1989 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.644295302013424e-06, |
| "loss": 1.3223, |
| "step": 1990 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.637583892617451e-06, |
| "loss": 1.2939, |
| "step": 1991 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.630872483221477e-06, |
| "loss": 1.3281, |
| "step": 1992 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 6.624161073825504e-06, |
| "loss": 1.293, |
| "step": 1993 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.6174496644295305e-06, |
| "loss": 1.2666, |
| "step": 1994 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.610738255033557e-06, |
| "loss": 1.2959, |
| "step": 1995 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.604026845637584e-06, |
| "loss": 1.25, |
| "step": 1996 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.597315436241611e-06, |
| "loss": 1.2861, |
| "step": 1997 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.590604026845639e-06, |
| "loss": 1.2617, |
| "step": 1998 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 6.583892617449666e-06, |
| "loss": 1.2969, |
| "step": 1999 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.5771812080536925e-06, |
| "loss": 1.2686, |
| "step": 2000 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.5704697986577185e-06, |
| "loss": 1.2793, |
| "step": 2001 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.563758389261745e-06, |
| "loss": 1.3047, |
| "step": 2002 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.557046979865772e-06, |
| "loss": 1.3281, |
| "step": 2003 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.550335570469799e-06, |
| "loss": 1.2891, |
| "step": 2004 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 6.543624161073826e-06, |
| "loss": 1.3086, |
| "step": 2005 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.536912751677853e-06, |
| "loss": 1.29, |
| "step": 2006 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.53020134228188e-06, |
| "loss": 1.3018, |
| "step": 2007 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.523489932885907e-06, |
| "loss": 1.2979, |
| "step": 2008 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.516778523489934e-06, |
| "loss": 1.2637, |
| "step": 2009 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.51006711409396e-06, |
| "loss": 1.2939, |
| "step": 2010 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 6.503355704697987e-06, |
| "loss": 1.2744, |
| "step": 2011 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.496644295302014e-06, |
| "loss": 1.3066, |
| "step": 2012 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.489932885906041e-06, |
| "loss": 1.2637, |
| "step": 2013 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.483221476510068e-06, |
| "loss": 1.2715, |
| "step": 2014 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.4765100671140944e-06, |
| "loss": 1.2637, |
| "step": 2015 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.469798657718121e-06, |
| "loss": 1.2812, |
| "step": 2016 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 6.463087248322148e-06, |
| "loss": 1.2939, |
| "step": 2017 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.456375838926176e-06, |
| "loss": 1.2705, |
| "step": 2018 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.449664429530201e-06, |
| "loss": 1.2676, |
| "step": 2019 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.442953020134229e-06, |
| "loss": 1.2783, |
| "step": 2020 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.4362416107382556e-06, |
| "loss": 1.3125, |
| "step": 2021 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.429530201342282e-06, |
| "loss": 1.2891, |
| "step": 2022 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 6.422818791946309e-06, |
| "loss": 1.2842, |
| "step": 2023 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.416107382550336e-06, |
| "loss": 1.2754, |
| "step": 2024 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.409395973154363e-06, |
| "loss": 1.3018, |
| "step": 2025 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.40268456375839e-06, |
| "loss": 1.3125, |
| "step": 2026 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.395973154362417e-06, |
| "loss": 1.2891, |
| "step": 2027 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.389261744966444e-06, |
| "loss": 1.3047, |
| "step": 2028 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 6.3825503355704695e-06, |
| "loss": 1.3252, |
| "step": 2029 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.375838926174497e-06, |
| "loss": 1.2793, |
| "step": 2030 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.369127516778524e-06, |
| "loss": 1.2861, |
| "step": 2031 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.362416107382551e-06, |
| "loss": 1.3135, |
| "step": 2032 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.355704697986578e-06, |
| "loss": 1.2588, |
| "step": 2033 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.348993288590605e-06, |
| "loss": 1.2881, |
| "step": 2034 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 6.3422818791946315e-06, |
| "loss": 1.29, |
| "step": 2035 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.335570469798658e-06, |
| "loss": 1.2705, |
| "step": 2036 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.328859060402685e-06, |
| "loss": 1.2236, |
| "step": 2037 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.322147651006711e-06, |
| "loss": 1.3066, |
| "step": 2038 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.315436241610738e-06, |
| "loss": 1.3164, |
| "step": 2039 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.308724832214766e-06, |
| "loss": 1.2979, |
| "step": 2040 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 6.302013422818793e-06, |
| "loss": 1.334, |
| "step": 2041 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.2953020134228195e-06, |
| "loss": 1.3203, |
| "step": 2042 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.288590604026846e-06, |
| "loss": 1.2812, |
| "step": 2043 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.281879194630873e-06, |
| "loss": 1.2998, |
| "step": 2044 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.2751677852349e-06, |
| "loss": 1.2949, |
| "step": 2045 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.268456375838927e-06, |
| "loss": 1.3096, |
| "step": 2046 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 6.261744966442953e-06, |
| "loss": 1.293, |
| "step": 2047 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.25503355704698e-06, |
| "loss": 1.29, |
| "step": 2048 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.248322147651007e-06, |
| "loss": 1.3066, |
| "step": 2049 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.241610738255034e-06, |
| "loss": 1.3027, |
| "step": 2050 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.234899328859061e-06, |
| "loss": 1.3125, |
| "step": 2051 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.228187919463088e-06, |
| "loss": 1.3193, |
| "step": 2052 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 6.221476510067115e-06, |
| "loss": 1.293, |
| "step": 2053 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.214765100671142e-06, |
| "loss": 1.2578, |
| "step": 2054 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.2080536912751686e-06, |
| "loss": 1.2725, |
| "step": 2055 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.2013422818791946e-06, |
| "loss": 1.2939, |
| "step": 2056 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.194630872483221e-06, |
| "loss": 1.3008, |
| "step": 2057 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.187919463087248e-06, |
| "loss": 1.3154, |
| "step": 2058 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 6.181208053691275e-06, |
| "loss": 1.2969, |
| "step": 2059 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.174496644295303e-06, |
| "loss": 1.3213, |
| "step": 2060 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.16778523489933e-06, |
| "loss": 1.3125, |
| "step": 2061 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.1610738255033565e-06, |
| "loss": 1.2852, |
| "step": 2062 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.154362416107383e-06, |
| "loss": 1.2939, |
| "step": 2063 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.14765100671141e-06, |
| "loss": 1.2627, |
| "step": 2064 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 6.140939597315436e-06, |
| "loss": 1.2812, |
| "step": 2065 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.134228187919463e-06, |
| "loss": 1.2881, |
| "step": 2066 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.12751677852349e-06, |
| "loss": 1.3125, |
| "step": 2067 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.120805369127517e-06, |
| "loss": 1.2822, |
| "step": 2068 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.114093959731544e-06, |
| "loss": 1.2637, |
| "step": 2069 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.107382550335571e-06, |
| "loss": 1.2607, |
| "step": 2070 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 6.100671140939598e-06, |
| "loss": 1.2754, |
| "step": 2071 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.093959731543625e-06, |
| "loss": 1.3066, |
| "step": 2072 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.087248322147652e-06, |
| "loss": 1.2832, |
| "step": 2073 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.080536912751679e-06, |
| "loss": 1.2686, |
| "step": 2074 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.073825503355705e-06, |
| "loss": 1.2822, |
| "step": 2075 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.067114093959732e-06, |
| "loss": 1.3125, |
| "step": 2076 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 6.0604026845637585e-06, |
| "loss": 1.2832, |
| "step": 2077 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.053691275167785e-06, |
| "loss": 1.2412, |
| "step": 2078 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.046979865771813e-06, |
| "loss": 1.2881, |
| "step": 2079 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.04026845637584e-06, |
| "loss": 1.3008, |
| "step": 2080 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.033557046979867e-06, |
| "loss": 1.2812, |
| "step": 2081 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.026845637583894e-06, |
| "loss": 1.292, |
| "step": 2082 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 6.0201342281879204e-06, |
| "loss": 1.2334, |
| "step": 2083 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 6.0134228187919464e-06, |
| "loss": 1.2783, |
| "step": 2084 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 6.006711409395973e-06, |
| "loss": 1.3037, |
| "step": 2085 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 6e-06, |
| "loss": 1.2783, |
| "step": 2086 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 5.993288590604027e-06, |
| "loss": 1.3115, |
| "step": 2087 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 5.986577181208054e-06, |
| "loss": 1.2969, |
| "step": 2088 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.9798657718120816e-06, |
| "loss": 1.2734, |
| "step": 2089 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.973154362416108e-06, |
| "loss": 1.2988, |
| "step": 2090 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.966442953020135e-06, |
| "loss": 1.2891, |
| "step": 2091 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.959731543624162e-06, |
| "loss": 1.2666, |
| "step": 2092 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.953020134228188e-06, |
| "loss": 1.291, |
| "step": 2093 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 5.946308724832215e-06, |
| "loss": 1.2832, |
| "step": 2094 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.939597315436242e-06, |
| "loss": 1.2822, |
| "step": 2095 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.932885906040269e-06, |
| "loss": 1.2715, |
| "step": 2096 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.9261744966442955e-06, |
| "loss": 1.3145, |
| "step": 2097 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.919463087248322e-06, |
| "loss": 1.2881, |
| "step": 2098 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.91275167785235e-06, |
| "loss": 1.2744, |
| "step": 2099 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 5.906040268456377e-06, |
| "loss": 1.2891, |
| "step": 2100 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.899328859060404e-06, |
| "loss": 1.2783, |
| "step": 2101 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.89261744966443e-06, |
| "loss": 1.2617, |
| "step": 2102 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.885906040268457e-06, |
| "loss": 1.3037, |
| "step": 2103 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.8791946308724835e-06, |
| "loss": 1.2959, |
| "step": 2104 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.87248322147651e-06, |
| "loss": 1.2988, |
| "step": 2105 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 5.865771812080537e-06, |
| "loss": 1.2676, |
| "step": 2106 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.859060402684564e-06, |
| "loss": 1.2998, |
| "step": 2107 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.852348993288591e-06, |
| "loss": 1.3262, |
| "step": 2108 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.845637583892619e-06, |
| "loss": 1.3086, |
| "step": 2109 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.8389261744966455e-06, |
| "loss": 1.3076, |
| "step": 2110 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.8322147651006715e-06, |
| "loss": 1.2568, |
| "step": 2111 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 5.825503355704698e-06, |
| "loss": 1.2832, |
| "step": 2112 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.818791946308725e-06, |
| "loss": 1.3203, |
| "step": 2113 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.812080536912752e-06, |
| "loss": 1.2812, |
| "step": 2114 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.805369127516779e-06, |
| "loss": 1.2744, |
| "step": 2115 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.798657718120806e-06, |
| "loss": 1.2686, |
| "step": 2116 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.791946308724833e-06, |
| "loss": 1.29, |
| "step": 2117 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 5.7852348993288594e-06, |
| "loss": 1.2627, |
| "step": 2118 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.778523489932887e-06, |
| "loss": 1.3408, |
| "step": 2119 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.771812080536914e-06, |
| "loss": 1.2715, |
| "step": 2120 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.76510067114094e-06, |
| "loss": 1.2832, |
| "step": 2121 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.758389261744967e-06, |
| "loss": 1.2764, |
| "step": 2122 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.751677852348994e-06, |
| "loss": 1.2773, |
| "step": 2123 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 5.7449664429530206e-06, |
| "loss": 1.3154, |
| "step": 2124 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.738255033557047e-06, |
| "loss": 1.2861, |
| "step": 2125 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.731543624161074e-06, |
| "loss": 1.2812, |
| "step": 2126 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.724832214765101e-06, |
| "loss": 1.2871, |
| "step": 2127 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.718120805369128e-06, |
| "loss": 1.2686, |
| "step": 2128 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.711409395973156e-06, |
| "loss": 1.3135, |
| "step": 2129 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 5.704697986577181e-06, |
| "loss": 1.292, |
| "step": 2130 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.6979865771812085e-06, |
| "loss": 1.3066, |
| "step": 2131 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.691275167785235e-06, |
| "loss": 1.2646, |
| "step": 2132 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.684563758389262e-06, |
| "loss": 1.2852, |
| "step": 2133 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.677852348993289e-06, |
| "loss": 1.3027, |
| "step": 2134 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.671140939597316e-06, |
| "loss": 1.3086, |
| "step": 2135 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 5.664429530201343e-06, |
| "loss": 1.3096, |
| "step": 2136 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.65771812080537e-06, |
| "loss": 1.2793, |
| "step": 2137 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.651006711409397e-06, |
| "loss": 1.2803, |
| "step": 2138 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.6442953020134225e-06, |
| "loss": 1.2969, |
| "step": 2139 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.637583892617449e-06, |
| "loss": 1.2842, |
| "step": 2140 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.630872483221477e-06, |
| "loss": 1.2666, |
| "step": 2141 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 5.624161073825504e-06, |
| "loss": 1.3115, |
| "step": 2142 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.617449664429531e-06, |
| "loss": 1.2773, |
| "step": 2143 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.610738255033558e-06, |
| "loss": 1.291, |
| "step": 2144 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.6040268456375845e-06, |
| "loss": 1.2734, |
| "step": 2145 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.597315436241611e-06, |
| "loss": 1.2832, |
| "step": 2146 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.590604026845638e-06, |
| "loss": 1.2969, |
| "step": 2147 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 5.583892617449664e-06, |
| "loss": 1.2881, |
| "step": 2148 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.577181208053691e-06, |
| "loss": 1.2695, |
| "step": 2149 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.570469798657718e-06, |
| "loss": 1.2881, |
| "step": 2150 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.563758389261746e-06, |
| "loss": 1.2891, |
| "step": 2151 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.5570469798657725e-06, |
| "loss": 1.2637, |
| "step": 2152 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.550335570469799e-06, |
| "loss": 1.2881, |
| "step": 2153 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 5.543624161073826e-06, |
| "loss": 1.2432, |
| "step": 2154 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.536912751677853e-06, |
| "loss": 1.2891, |
| "step": 2155 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.53020134228188e-06, |
| "loss": 1.2725, |
| "step": 2156 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.523489932885906e-06, |
| "loss": 1.3184, |
| "step": 2157 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.516778523489933e-06, |
| "loss": 1.3037, |
| "step": 2158 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.51006711409396e-06, |
| "loss": 1.2803, |
| "step": 2159 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 5.503355704697987e-06, |
| "loss": 1.2871, |
| "step": 2160 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.496644295302014e-06, |
| "loss": 1.2578, |
| "step": 2161 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.489932885906041e-06, |
| "loss": 1.2822, |
| "step": 2162 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.483221476510068e-06, |
| "loss": 1.2598, |
| "step": 2163 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.476510067114095e-06, |
| "loss": 1.2744, |
| "step": 2164 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.4697986577181215e-06, |
| "loss": 1.3047, |
| "step": 2165 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 5.463087248322148e-06, |
| "loss": 1.2783, |
| "step": 2166 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.456375838926174e-06, |
| "loss": 1.3008, |
| "step": 2167 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.449664429530201e-06, |
| "loss": 1.2715, |
| "step": 2168 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.442953020134228e-06, |
| "loss": 1.2607, |
| "step": 2169 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.436241610738256e-06, |
| "loss": 1.3066, |
| "step": 2170 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.429530201342283e-06, |
| "loss": 1.2842, |
| "step": 2171 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 5.4228187919463095e-06, |
| "loss": 1.2891, |
| "step": 2172 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.416107382550336e-06, |
| "loss": 1.2842, |
| "step": 2173 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.409395973154363e-06, |
| "loss": 1.3174, |
| "step": 2174 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.40268456375839e-06, |
| "loss": 1.2842, |
| "step": 2175 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.395973154362416e-06, |
| "loss": 1.3145, |
| "step": 2176 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.389261744966443e-06, |
| "loss": 1.2891, |
| "step": 2177 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 5.38255033557047e-06, |
| "loss": 1.2812, |
| "step": 2178 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.375838926174497e-06, |
| "loss": 1.2676, |
| "step": 2179 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.369127516778524e-06, |
| "loss": 1.2676, |
| "step": 2180 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.362416107382551e-06, |
| "loss": 1.3096, |
| "step": 2181 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.355704697986578e-06, |
| "loss": 1.293, |
| "step": 2182 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.348993288590605e-06, |
| "loss": 1.2734, |
| "step": 2183 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 5.342281879194632e-06, |
| "loss": 1.293, |
| "step": 2184 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.335570469798658e-06, |
| "loss": 1.292, |
| "step": 2185 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.328859060402685e-06, |
| "loss": 1.2988, |
| "step": 2186 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.3221476510067115e-06, |
| "loss": 1.3008, |
| "step": 2187 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.315436241610738e-06, |
| "loss": 1.2959, |
| "step": 2188 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.308724832214765e-06, |
| "loss": 1.2773, |
| "step": 2189 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 5.302013422818793e-06, |
| "loss": 1.2949, |
| "step": 2190 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.29530201342282e-06, |
| "loss": 1.3301, |
| "step": 2191 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.2885906040268466e-06, |
| "loss": 1.2988, |
| "step": 2192 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.2818791946308734e-06, |
| "loss": 1.2705, |
| "step": 2193 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.2751677852348994e-06, |
| "loss": 1.2861, |
| "step": 2194 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.268456375838926e-06, |
| "loss": 1.2949, |
| "step": 2195 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 5.261744966442953e-06, |
| "loss": 1.2959, |
| "step": 2196 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.25503355704698e-06, |
| "loss": 1.2959, |
| "step": 2197 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.248322147651007e-06, |
| "loss": 1.2988, |
| "step": 2198 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.241610738255034e-06, |
| "loss": 1.2734, |
| "step": 2199 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.234899328859061e-06, |
| "loss": 1.2871, |
| "step": 2200 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.228187919463088e-06, |
| "loss": 1.2949, |
| "step": 2201 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 5.221476510067115e-06, |
| "loss": 1.3027, |
| "step": 2202 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.214765100671141e-06, |
| "loss": 1.29, |
| "step": 2203 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.208053691275168e-06, |
| "loss": 1.251, |
| "step": 2204 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.201342281879195e-06, |
| "loss": 1.2939, |
| "step": 2205 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.194630872483222e-06, |
| "loss": 1.2588, |
| "step": 2206 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.1879194630872485e-06, |
| "loss": 1.293, |
| "step": 2207 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 5.181208053691275e-06, |
| "loss": 1.2861, |
| "step": 2208 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.174496644295302e-06, |
| "loss": 1.3115, |
| "step": 2209 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.16778523489933e-06, |
| "loss": 1.3281, |
| "step": 2210 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.161073825503357e-06, |
| "loss": 1.3008, |
| "step": 2211 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.154362416107384e-06, |
| "loss": 1.2979, |
| "step": 2212 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.14765100671141e-06, |
| "loss": 1.2842, |
| "step": 2213 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 5.1409395973154365e-06, |
| "loss": 1.3262, |
| "step": 2214 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.134228187919463e-06, |
| "loss": 1.2725, |
| "step": 2215 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.12751677852349e-06, |
| "loss": 1.2686, |
| "step": 2216 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.120805369127517e-06, |
| "loss": 1.2549, |
| "step": 2217 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.114093959731544e-06, |
| "loss": 1.2656, |
| "step": 2218 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.107382550335571e-06, |
| "loss": 1.2539, |
| "step": 2219 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 5.1006711409395985e-06, |
| "loss": 1.2861, |
| "step": 2220 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.093959731543625e-06, |
| "loss": 1.2793, |
| "step": 2221 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.087248322147651e-06, |
| "loss": 1.2568, |
| "step": 2222 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.080536912751678e-06, |
| "loss": 1.2578, |
| "step": 2223 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.073825503355705e-06, |
| "loss": 1.3154, |
| "step": 2224 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.067114093959732e-06, |
| "loss": 1.2578, |
| "step": 2225 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 5.060402684563759e-06, |
| "loss": 1.2725, |
| "step": 2226 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.053691275167786e-06, |
| "loss": 1.2754, |
| "step": 2227 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.0469798657718124e-06, |
| "loss": 1.2715, |
| "step": 2228 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.04026845637584e-06, |
| "loss": 1.3057, |
| "step": 2229 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.033557046979867e-06, |
| "loss": 1.2988, |
| "step": 2230 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.026845637583892e-06, |
| "loss": 1.2598, |
| "step": 2231 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 5.02013422818792e-06, |
| "loss": 1.2861, |
| "step": 2232 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 5.013422818791947e-06, |
| "loss": 1.2861, |
| "step": 2233 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 5.0067114093959736e-06, |
| "loss": 1.2832, |
| "step": 2234 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 5e-06, |
| "loss": 1.2979, |
| "step": 2235 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 4.993288590604027e-06, |
| "loss": 1.2764, |
| "step": 2236 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 4.986577181208054e-06, |
| "loss": 1.3203, |
| "step": 2237 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.979865771812081e-06, |
| "loss": 1.2812, |
| "step": 2238 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.973154362416108e-06, |
| "loss": 1.2354, |
| "step": 2239 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.966442953020135e-06, |
| "loss": 1.2656, |
| "step": 2240 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.9597315436241615e-06, |
| "loss": 1.2725, |
| "step": 2241 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.953020134228188e-06, |
| "loss": 1.2959, |
| "step": 2242 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 4.946308724832215e-06, |
| "loss": 1.3027, |
| "step": 2243 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.939597315436242e-06, |
| "loss": 1.2842, |
| "step": 2244 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.932885906040269e-06, |
| "loss": 1.2646, |
| "step": 2245 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.926174496644296e-06, |
| "loss": 1.2744, |
| "step": 2246 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.919463087248323e-06, |
| "loss": 1.2852, |
| "step": 2247 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.9127516778523495e-06, |
| "loss": 1.2646, |
| "step": 2248 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 4.906040268456376e-06, |
| "loss": 1.2607, |
| "step": 2249 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.899328859060403e-06, |
| "loss": 1.3174, |
| "step": 2250 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.89261744966443e-06, |
| "loss": 1.3213, |
| "step": 2251 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.885906040268457e-06, |
| "loss": 1.2783, |
| "step": 2252 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.879194630872484e-06, |
| "loss": 1.2998, |
| "step": 2253 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.872483221476511e-06, |
| "loss": 1.2812, |
| "step": 2254 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 4.8657718120805375e-06, |
| "loss": 1.2939, |
| "step": 2255 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.859060402684564e-06, |
| "loss": 1.3057, |
| "step": 2256 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.852348993288591e-06, |
| "loss": 1.2881, |
| "step": 2257 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.845637583892618e-06, |
| "loss": 1.291, |
| "step": 2258 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.838926174496645e-06, |
| "loss": 1.2764, |
| "step": 2259 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.832214765100672e-06, |
| "loss": 1.3135, |
| "step": 2260 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 4.825503355704699e-06, |
| "loss": 1.293, |
| "step": 2261 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.8187919463087254e-06, |
| "loss": 1.2793, |
| "step": 2262 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.812080536912752e-06, |
| "loss": 1.2715, |
| "step": 2263 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.805369127516779e-06, |
| "loss": 1.3008, |
| "step": 2264 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.798657718120805e-06, |
| "loss": 1.2891, |
| "step": 2265 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.791946308724833e-06, |
| "loss": 1.2793, |
| "step": 2266 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 4.78523489932886e-06, |
| "loss": 1.2959, |
| "step": 2267 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.7785234899328866e-06, |
| "loss": 1.2627, |
| "step": 2268 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.771812080536913e-06, |
| "loss": 1.2773, |
| "step": 2269 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.765100671140939e-06, |
| "loss": 1.291, |
| "step": 2270 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.758389261744967e-06, |
| "loss": 1.2881, |
| "step": 2271 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.751677852348994e-06, |
| "loss": 1.2734, |
| "step": 2272 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 4.744966442953021e-06, |
| "loss": 1.2783, |
| "step": 2273 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.738255033557048e-06, |
| "loss": 1.2842, |
| "step": 2274 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.731543624161074e-06, |
| "loss": 1.2539, |
| "step": 2275 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.724832214765101e-06, |
| "loss": 1.2588, |
| "step": 2276 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.718120805369128e-06, |
| "loss": 1.2939, |
| "step": 2277 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.711409395973155e-06, |
| "loss": 1.2715, |
| "step": 2278 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 4.704697986577181e-06, |
| "loss": 1.2842, |
| "step": 2279 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.697986577181208e-06, |
| "loss": 1.2754, |
| "step": 2280 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.691275167785236e-06, |
| "loss": 1.2783, |
| "step": 2281 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.6845637583892625e-06, |
| "loss": 1.2969, |
| "step": 2282 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.677852348993289e-06, |
| "loss": 1.2832, |
| "step": 2283 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.671140939597315e-06, |
| "loss": 1.291, |
| "step": 2284 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 4.664429530201342e-06, |
| "loss": 1.2861, |
| "step": 2285 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.65771812080537e-06, |
| "loss": 1.3018, |
| "step": 2286 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.651006711409397e-06, |
| "loss": 1.291, |
| "step": 2287 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.644295302013423e-06, |
| "loss": 1.2881, |
| "step": 2288 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.63758389261745e-06, |
| "loss": 1.2539, |
| "step": 2289 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.6308724832214765e-06, |
| "loss": 1.2939, |
| "step": 2290 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 4.624161073825504e-06, |
| "loss": 1.2705, |
| "step": 2291 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.617449664429531e-06, |
| "loss": 1.2783, |
| "step": 2292 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.610738255033557e-06, |
| "loss": 1.2773, |
| "step": 2293 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.604026845637584e-06, |
| "loss": 1.2637, |
| "step": 2294 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.597315436241611e-06, |
| "loss": 1.2725, |
| "step": 2295 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.5906040268456384e-06, |
| "loss": 1.2842, |
| "step": 2296 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 4.583892617449665e-06, |
| "loss": 1.2871, |
| "step": 2297 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.577181208053691e-06, |
| "loss": 1.2695, |
| "step": 2298 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.570469798657718e-06, |
| "loss": 1.2832, |
| "step": 2299 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.563758389261745e-06, |
| "loss": 1.3008, |
| "step": 2300 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.557046979865773e-06, |
| "loss": 1.3066, |
| "step": 2301 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.550335570469799e-06, |
| "loss": 1.2881, |
| "step": 2302 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 4.5436241610738256e-06, |
| "loss": 1.2686, |
| "step": 2303 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.536912751677852e-06, |
| "loss": 1.2598, |
| "step": 2304 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.530201342281879e-06, |
| "loss": 1.2734, |
| "step": 2305 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.523489932885907e-06, |
| "loss": 1.2959, |
| "step": 2306 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.516778523489933e-06, |
| "loss": 1.2881, |
| "step": 2307 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.51006711409396e-06, |
| "loss": 1.2686, |
| "step": 2308 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 4.503355704697987e-06, |
| "loss": 1.3232, |
| "step": 2309 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.4966442953020135e-06, |
| "loss": 1.2881, |
| "step": 2310 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.48993288590604e-06, |
| "loss": 1.2822, |
| "step": 2311 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.483221476510067e-06, |
| "loss": 1.3164, |
| "step": 2312 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.476510067114094e-06, |
| "loss": 1.2783, |
| "step": 2313 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.469798657718121e-06, |
| "loss": 1.3057, |
| "step": 2314 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 4.463087248322149e-06, |
| "loss": 1.292, |
| "step": 2315 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.456375838926175e-06, |
| "loss": 1.291, |
| "step": 2316 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.4496644295302015e-06, |
| "loss": 1.2949, |
| "step": 2317 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.442953020134228e-06, |
| "loss": 1.2803, |
| "step": 2318 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.436241610738255e-06, |
| "loss": 1.2646, |
| "step": 2319 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.429530201342283e-06, |
| "loss": 1.2607, |
| "step": 2320 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 4.422818791946309e-06, |
| "loss": 1.3027, |
| "step": 2321 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.416107382550336e-06, |
| "loss": 1.2471, |
| "step": 2322 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.409395973154363e-06, |
| "loss": 1.293, |
| "step": 2323 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.4026845637583895e-06, |
| "loss": 1.2803, |
| "step": 2324 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.395973154362416e-06, |
| "loss": 1.2939, |
| "step": 2325 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.389261744966443e-06, |
| "loss": 1.2725, |
| "step": 2326 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 4.38255033557047e-06, |
| "loss": 1.2881, |
| "step": 2327 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.375838926174497e-06, |
| "loss": 1.2949, |
| "step": 2328 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.369127516778524e-06, |
| "loss": 1.2568, |
| "step": 2329 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.362416107382551e-06, |
| "loss": 1.2578, |
| "step": 2330 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.3557046979865775e-06, |
| "loss": 1.2959, |
| "step": 2331 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.348993288590604e-06, |
| "loss": 1.3027, |
| "step": 2332 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 4.342281879194631e-06, |
| "loss": 1.3145, |
| "step": 2333 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.335570469798658e-06, |
| "loss": 1.2559, |
| "step": 2334 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.328859060402685e-06, |
| "loss": 1.2598, |
| "step": 2335 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.322147651006712e-06, |
| "loss": 1.2764, |
| "step": 2336 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.3154362416107386e-06, |
| "loss": 1.2842, |
| "step": 2337 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.3087248322147654e-06, |
| "loss": 1.2715, |
| "step": 2338 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 4.302013422818792e-06, |
| "loss": 1.2637, |
| "step": 2339 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.295302013422819e-06, |
| "loss": 1.2832, |
| "step": 2340 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.288590604026846e-06, |
| "loss": 1.3037, |
| "step": 2341 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.281879194630873e-06, |
| "loss": 1.2627, |
| "step": 2342 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.2751677852349e-06, |
| "loss": 1.2861, |
| "step": 2343 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.2684563758389265e-06, |
| "loss": 1.2998, |
| "step": 2344 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 4.261744966442953e-06, |
| "loss": 1.2998, |
| "step": 2345 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.25503355704698e-06, |
| "loss": 1.2871, |
| "step": 2346 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.248322147651007e-06, |
| "loss": 1.2959, |
| "step": 2347 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.241610738255034e-06, |
| "loss": 1.2705, |
| "step": 2348 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.234899328859061e-06, |
| "loss": 1.2852, |
| "step": 2349 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.228187919463088e-06, |
| "loss": 1.293, |
| "step": 2350 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 4.2214765100671145e-06, |
| "loss": 1.2598, |
| "step": 2351 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.214765100671141e-06, |
| "loss": 1.2793, |
| "step": 2352 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.208053691275168e-06, |
| "loss": 1.2568, |
| "step": 2353 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.201342281879195e-06, |
| "loss": 1.2275, |
| "step": 2354 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.194630872483222e-06, |
| "loss": 1.2715, |
| "step": 2355 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.187919463087249e-06, |
| "loss": 1.2998, |
| "step": 2356 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 4.181208053691276e-06, |
| "loss": 1.3027, |
| "step": 2357 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.1744966442953025e-06, |
| "loss": 1.2666, |
| "step": 2358 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.167785234899329e-06, |
| "loss": 1.2656, |
| "step": 2359 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.161073825503356e-06, |
| "loss": 1.292, |
| "step": 2360 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.154362416107383e-06, |
| "loss": 1.2852, |
| "step": 2361 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.14765100671141e-06, |
| "loss": 1.2793, |
| "step": 2362 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 4.140939597315437e-06, |
| "loss": 1.3193, |
| "step": 2363 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.134228187919464e-06, |
| "loss": 1.2812, |
| "step": 2364 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.1275167785234905e-06, |
| "loss": 1.29, |
| "step": 2365 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.120805369127517e-06, |
| "loss": 1.2725, |
| "step": 2366 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.114093959731544e-06, |
| "loss": 1.2959, |
| "step": 2367 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.107382550335571e-06, |
| "loss": 1.2783, |
| "step": 2368 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 4.100671140939598e-06, |
| "loss": 1.3096, |
| "step": 2369 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.093959731543625e-06, |
| "loss": 1.2822, |
| "step": 2370 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.087248322147651e-06, |
| "loss": 1.2959, |
| "step": 2371 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.0805369127516784e-06, |
| "loss": 1.2803, |
| "step": 2372 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.073825503355705e-06, |
| "loss": 1.2568, |
| "step": 2373 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.067114093959732e-06, |
| "loss": 1.3105, |
| "step": 2374 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 4.060402684563759e-06, |
| "loss": 1.2764, |
| "step": 2375 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.053691275167785e-06, |
| "loss": 1.2451, |
| "step": 2376 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.046979865771813e-06, |
| "loss": 1.3223, |
| "step": 2377 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.0402684563758395e-06, |
| "loss": 1.2744, |
| "step": 2378 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.033557046979866e-06, |
| "loss": 1.2979, |
| "step": 2379 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.026845637583892e-06, |
| "loss": 1.2656, |
| "step": 2380 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 4.020134228187919e-06, |
| "loss": 1.2832, |
| "step": 2381 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 4.013422818791947e-06, |
| "loss": 1.2832, |
| "step": 2382 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 4.006711409395974e-06, |
| "loss": 1.2695, |
| "step": 2383 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 1.3164, |
| "step": 2384 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 3.993288590604027e-06, |
| "loss": 1.2568, |
| "step": 2385 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 3.9865771812080535e-06, |
| "loss": 1.292, |
| "step": 2386 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.979865771812081e-06, |
| "loss": 1.2646, |
| "step": 2387 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.973154362416108e-06, |
| "loss": 1.252, |
| "step": 2388 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.966442953020135e-06, |
| "loss": 1.2363, |
| "step": 2389 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.959731543624161e-06, |
| "loss": 1.2588, |
| "step": 2390 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.953020134228188e-06, |
| "loss": 1.2949, |
| "step": 2391 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 3.9463087248322155e-06, |
| "loss": 1.2939, |
| "step": 2392 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.939597315436242e-06, |
| "loss": 1.3184, |
| "step": 2393 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.932885906040268e-06, |
| "loss": 1.2695, |
| "step": 2394 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.926174496644295e-06, |
| "loss": 1.3174, |
| "step": 2395 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.919463087248322e-06, |
| "loss": 1.2695, |
| "step": 2396 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.91275167785235e-06, |
| "loss": 1.3076, |
| "step": 2397 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 3.906040268456377e-06, |
| "loss": 1.251, |
| "step": 2398 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.899328859060403e-06, |
| "loss": 1.2754, |
| "step": 2399 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.8926174496644295e-06, |
| "loss": 1.2275, |
| "step": 2400 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.885906040268457e-06, |
| "loss": 1.2832, |
| "step": 2401 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.879194630872484e-06, |
| "loss": 1.2715, |
| "step": 2402 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.87248322147651e-06, |
| "loss": 1.2783, |
| "step": 2403 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 3.865771812080537e-06, |
| "loss": 1.2627, |
| "step": 2404 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.859060402684564e-06, |
| "loss": 1.2725, |
| "step": 2405 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.8523489932885914e-06, |
| "loss": 1.2998, |
| "step": 2406 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.845637583892618e-06, |
| "loss": 1.2666, |
| "step": 2407 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.838926174496644e-06, |
| "loss": 1.2637, |
| "step": 2408 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.832214765100671e-06, |
| "loss": 1.2969, |
| "step": 2409 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 3.825503355704698e-06, |
| "loss": 1.3125, |
| "step": 2410 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.818791946308726e-06, |
| "loss": 1.29, |
| "step": 2411 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.812080536912752e-06, |
| "loss": 1.2832, |
| "step": 2412 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.8053691275167786e-06, |
| "loss": 1.2744, |
| "step": 2413 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.798657718120806e-06, |
| "loss": 1.2861, |
| "step": 2414 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.7919463087248327e-06, |
| "loss": 1.2783, |
| "step": 2415 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 3.7852348993288595e-06, |
| "loss": 1.2832, |
| "step": 2416 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.778523489932886e-06, |
| "loss": 1.3184, |
| "step": 2417 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.771812080536913e-06, |
| "loss": 1.2764, |
| "step": 2418 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.76510067114094e-06, |
| "loss": 1.2432, |
| "step": 2419 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.758389261744967e-06, |
| "loss": 1.2764, |
| "step": 2420 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.751677852348994e-06, |
| "loss": 1.251, |
| "step": 2421 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 3.7449664429530202e-06, |
| "loss": 1.2754, |
| "step": 2422 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.738255033557047e-06, |
| "loss": 1.2686, |
| "step": 2423 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.7315436241610744e-06, |
| "loss": 1.2852, |
| "step": 2424 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.7248322147651012e-06, |
| "loss": 1.2812, |
| "step": 2425 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.7181208053691276e-06, |
| "loss": 1.2715, |
| "step": 2426 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.7114093959731545e-06, |
| "loss": 1.2646, |
| "step": 2427 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 3.7046979865771814e-06, |
| "loss": 1.2764, |
| "step": 2428 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.6979865771812086e-06, |
| "loss": 1.2412, |
| "step": 2429 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.6912751677852355e-06, |
| "loss": 1.2373, |
| "step": 2430 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.684563758389262e-06, |
| "loss": 1.2803, |
| "step": 2431 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.6778523489932888e-06, |
| "loss": 1.2598, |
| "step": 2432 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.6711409395973156e-06, |
| "loss": 1.2598, |
| "step": 2433 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 3.664429530201343e-06, |
| "loss": 1.2686, |
| "step": 2434 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.6577181208053697e-06, |
| "loss": 1.2676, |
| "step": 2435 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.651006711409396e-06, |
| "loss": 1.2656, |
| "step": 2436 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.644295302013423e-06, |
| "loss": 1.248, |
| "step": 2437 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.63758389261745e-06, |
| "loss": 1.2686, |
| "step": 2438 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.630872483221477e-06, |
| "loss": 1.293, |
| "step": 2439 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 3.6241610738255036e-06, |
| "loss": 1.2559, |
| "step": 2440 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.6174496644295304e-06, |
| "loss": 1.293, |
| "step": 2441 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.6107382550335573e-06, |
| "loss": 1.2686, |
| "step": 2442 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.604026845637584e-06, |
| "loss": 1.2744, |
| "step": 2443 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.5973154362416114e-06, |
| "loss": 1.2598, |
| "step": 2444 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.590604026845638e-06, |
| "loss": 1.2529, |
| "step": 2445 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 3.5838926174496647e-06, |
| "loss": 1.2686, |
| "step": 2446 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.5771812080536916e-06, |
| "loss": 1.293, |
| "step": 2447 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.5704697986577184e-06, |
| "loss": 1.2412, |
| "step": 2448 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.563758389261745e-06, |
| "loss": 1.2881, |
| "step": 2449 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.557046979865772e-06, |
| "loss": 1.2715, |
| "step": 2450 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.550335570469799e-06, |
| "loss": 1.2676, |
| "step": 2451 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 3.543624161073826e-06, |
| "loss": 1.2891, |
| "step": 2452 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.5369127516778527e-06, |
| "loss": 1.2598, |
| "step": 2453 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.530201342281879e-06, |
| "loss": 1.2559, |
| "step": 2454 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.5234899328859064e-06, |
| "loss": 1.2637, |
| "step": 2455 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.5167785234899332e-06, |
| "loss": 1.2627, |
| "step": 2456 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.51006711409396e-06, |
| "loss": 1.2754, |
| "step": 2457 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 3.503355704697987e-06, |
| "loss": 1.2783, |
| "step": 2458 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4966442953020134e-06, |
| "loss": 1.2881, |
| "step": 2459 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4899328859060407e-06, |
| "loss": 1.2998, |
| "step": 2460 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4832214765100675e-06, |
| "loss": 1.3232, |
| "step": 2461 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4765100671140944e-06, |
| "loss": 1.2744, |
| "step": 2462 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4697986577181208e-06, |
| "loss": 1.2744, |
| "step": 2463 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 3.4630872483221476e-06, |
| "loss": 1.2744, |
| "step": 2464 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.456375838926175e-06, |
| "loss": 1.2617, |
| "step": 2465 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.4496644295302018e-06, |
| "loss": 1.2734, |
| "step": 2466 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.4429530201342286e-06, |
| "loss": 1.2812, |
| "step": 2467 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.436241610738255e-06, |
| "loss": 1.2871, |
| "step": 2468 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.429530201342282e-06, |
| "loss": 1.2998, |
| "step": 2469 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 3.422818791946309e-06, |
| "loss": 1.2822, |
| "step": 2470 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.416107382550336e-06, |
| "loss": 1.2842, |
| "step": 2471 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.4093959731543625e-06, |
| "loss": 1.2588, |
| "step": 2472 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.4026845637583893e-06, |
| "loss": 1.2646, |
| "step": 2473 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.395973154362416e-06, |
| "loss": 1.29, |
| "step": 2474 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.3892617449664434e-06, |
| "loss": 1.292, |
| "step": 2475 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 3.3825503355704703e-06, |
| "loss": 1.2598, |
| "step": 2476 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.3758389261744967e-06, |
| "loss": 1.2539, |
| "step": 2477 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.3691275167785236e-06, |
| "loss": 1.2588, |
| "step": 2478 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.3624161073825504e-06, |
| "loss": 1.2402, |
| "step": 2479 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.3557046979865777e-06, |
| "loss": 1.2715, |
| "step": 2480 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.3489932885906046e-06, |
| "loss": 1.2637, |
| "step": 2481 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 3.342281879194631e-06, |
| "loss": 1.2422, |
| "step": 2482 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.335570469798658e-06, |
| "loss": 1.2949, |
| "step": 2483 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.328859060402685e-06, |
| "loss": 1.2549, |
| "step": 2484 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.322147651006712e-06, |
| "loss": 1.2959, |
| "step": 2485 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.3154362416107384e-06, |
| "loss": 1.2578, |
| "step": 2486 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.3087248322147653e-06, |
| "loss": 1.2676, |
| "step": 2487 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 3.302013422818792e-06, |
| "loss": 1.2559, |
| "step": 2488 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2953020134228194e-06, |
| "loss": 1.248, |
| "step": 2489 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2885906040268462e-06, |
| "loss": 1.249, |
| "step": 2490 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2818791946308727e-06, |
| "loss": 1.2686, |
| "step": 2491 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2751677852348995e-06, |
| "loss": 1.2793, |
| "step": 2492 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2684563758389264e-06, |
| "loss": 1.293, |
| "step": 2493 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 3.2617449664429537e-06, |
| "loss": 1.2891, |
| "step": 2494 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.25503355704698e-06, |
| "loss": 1.2588, |
| "step": 2495 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.248322147651007e-06, |
| "loss": 1.2793, |
| "step": 2496 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.241610738255034e-06, |
| "loss": 1.25, |
| "step": 2497 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.2348993288590606e-06, |
| "loss": 1.2646, |
| "step": 2498 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.228187919463088e-06, |
| "loss": 1.2891, |
| "step": 2499 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 3.2214765100671143e-06, |
| "loss": 1.2539, |
| "step": 2500 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.214765100671141e-06, |
| "loss": 1.2549, |
| "step": 2501 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.208053691275168e-06, |
| "loss": 1.248, |
| "step": 2502 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.201342281879195e-06, |
| "loss": 1.2529, |
| "step": 2503 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.194630872483222e-06, |
| "loss": 1.2822, |
| "step": 2504 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.1879194630872486e-06, |
| "loss": 1.2998, |
| "step": 2505 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 3.1812080536912755e-06, |
| "loss": 1.248, |
| "step": 2506 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.1744966442953023e-06, |
| "loss": 1.249, |
| "step": 2507 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.167785234899329e-06, |
| "loss": 1.2656, |
| "step": 2508 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.1610738255033556e-06, |
| "loss": 1.2695, |
| "step": 2509 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.154362416107383e-06, |
| "loss": 1.2754, |
| "step": 2510 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.1476510067114097e-06, |
| "loss": 1.2568, |
| "step": 2511 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 3.1409395973154366e-06, |
| "loss": 1.251, |
| "step": 2512 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.1342281879194634e-06, |
| "loss": 1.2803, |
| "step": 2513 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.12751677852349e-06, |
| "loss": 1.2773, |
| "step": 2514 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.120805369127517e-06, |
| "loss": 1.2998, |
| "step": 2515 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.114093959731544e-06, |
| "loss": 1.3066, |
| "step": 2516 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.107382550335571e-06, |
| "loss": 1.2676, |
| "step": 2517 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 3.1006711409395973e-06, |
| "loss": 1.2725, |
| "step": 2518 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.093959731543624e-06, |
| "loss": 1.2549, |
| "step": 2519 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.0872483221476514e-06, |
| "loss": 1.2607, |
| "step": 2520 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.0805369127516783e-06, |
| "loss": 1.2627, |
| "step": 2521 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.073825503355705e-06, |
| "loss": 1.2637, |
| "step": 2522 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.0671140939597315e-06, |
| "loss": 1.2646, |
| "step": 2523 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 3.0604026845637584e-06, |
| "loss": 1.292, |
| "step": 2524 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.0536912751677857e-06, |
| "loss": 1.2891, |
| "step": 2525 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.0469798657718125e-06, |
| "loss": 1.3203, |
| "step": 2526 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.0402684563758394e-06, |
| "loss": 1.2705, |
| "step": 2527 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.033557046979866e-06, |
| "loss": 1.2676, |
| "step": 2528 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.0268456375838927e-06, |
| "loss": 1.2812, |
| "step": 2529 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 3.02013422818792e-06, |
| "loss": 1.2539, |
| "step": 2530 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 3.013422818791947e-06, |
| "loss": 1.2744, |
| "step": 2531 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 3.0067114093959732e-06, |
| "loss": 1.2832, |
| "step": 2532 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 3e-06, |
| "loss": 1.2705, |
| "step": 2533 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.993288590604027e-06, |
| "loss": 1.2559, |
| "step": 2534 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.986577181208054e-06, |
| "loss": 1.2568, |
| "step": 2535 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.979865771812081e-06, |
| "loss": 1.2842, |
| "step": 2536 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.9731543624161075e-06, |
| "loss": 1.3096, |
| "step": 2537 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.9664429530201343e-06, |
| "loss": 1.2949, |
| "step": 2538 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.959731543624161e-06, |
| "loss": 1.29, |
| "step": 2539 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.9530201342281885e-06, |
| "loss": 1.2822, |
| "step": 2540 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.946308724832215e-06, |
| "loss": 1.2568, |
| "step": 2541 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9395973154362418e-06, |
| "loss": 1.29, |
| "step": 2542 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9328859060402686e-06, |
| "loss": 1.2832, |
| "step": 2543 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9261744966442955e-06, |
| "loss": 1.2607, |
| "step": 2544 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.9194630872483227e-06, |
| "loss": 1.2695, |
| "step": 2545 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.912751677852349e-06, |
| "loss": 1.2402, |
| "step": 2546 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.906040268456376e-06, |
| "loss": 1.2783, |
| "step": 2547 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.899328859060403e-06, |
| "loss": 1.2559, |
| "step": 2548 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.8926174496644297e-06, |
| "loss": 1.2793, |
| "step": 2549 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.885906040268457e-06, |
| "loss": 1.2705, |
| "step": 2550 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.8791946308724834e-06, |
| "loss": 1.2812, |
| "step": 2551 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.8724832214765103e-06, |
| "loss": 1.2998, |
| "step": 2552 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.865771812080537e-06, |
| "loss": 1.2266, |
| "step": 2553 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.859060402684564e-06, |
| "loss": 1.2822, |
| "step": 2554 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.8523489932885904e-06, |
| "loss": 1.2832, |
| "step": 2555 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.8456375838926177e-06, |
| "loss": 1.2646, |
| "step": 2556 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.8389261744966445e-06, |
| "loss": 1.2725, |
| "step": 2557 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.8322147651006714e-06, |
| "loss": 1.2725, |
| "step": 2558 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.8255033557046987e-06, |
| "loss": 1.2578, |
| "step": 2559 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.8187919463087247e-06, |
| "loss": 1.2852, |
| "step": 2560 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.812080536912752e-06, |
| "loss": 1.2754, |
| "step": 2561 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.805369127516779e-06, |
| "loss": 1.2764, |
| "step": 2562 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.7986577181208057e-06, |
| "loss": 1.2666, |
| "step": 2563 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.791946308724832e-06, |
| "loss": 1.2549, |
| "step": 2564 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.785234899328859e-06, |
| "loss": 1.2588, |
| "step": 2565 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.7785234899328862e-06, |
| "loss": 1.2578, |
| "step": 2566 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.771812080536913e-06, |
| "loss": 1.2686, |
| "step": 2567 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.76510067114094e-06, |
| "loss": 1.2734, |
| "step": 2568 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.7583892617449664e-06, |
| "loss": 1.2812, |
| "step": 2569 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.7516778523489936e-06, |
| "loss": 1.2451, |
| "step": 2570 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.7449664429530205e-06, |
| "loss": 1.25, |
| "step": 2571 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.7382550335570473e-06, |
| "loss": 1.2861, |
| "step": 2572 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.731543624161074e-06, |
| "loss": 1.2559, |
| "step": 2573 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.7248322147651006e-06, |
| "loss": 1.2842, |
| "step": 2574 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.718120805369128e-06, |
| "loss": 1.2725, |
| "step": 2575 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.7114093959731548e-06, |
| "loss": 1.2656, |
| "step": 2576 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.7046979865771816e-06, |
| "loss": 1.2676, |
| "step": 2577 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.697986577181208e-06, |
| "loss": 1.2871, |
| "step": 2578 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.691275167785235e-06, |
| "loss": 1.2695, |
| "step": 2579 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.684563758389262e-06, |
| "loss": 1.2617, |
| "step": 2580 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.677852348993289e-06, |
| "loss": 1.252, |
| "step": 2581 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.671140939597316e-06, |
| "loss": 1.2871, |
| "step": 2582 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.6644295302013423e-06, |
| "loss": 1.2686, |
| "step": 2583 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.657718120805369e-06, |
| "loss": 1.3271, |
| "step": 2584 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.6510067114093964e-06, |
| "loss": 1.2744, |
| "step": 2585 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.6442953020134233e-06, |
| "loss": 1.2617, |
| "step": 2586 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.6375838926174497e-06, |
| "loss": 1.2852, |
| "step": 2587 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.6308724832214766e-06, |
| "loss": 1.252, |
| "step": 2588 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.6241610738255034e-06, |
| "loss": 1.2549, |
| "step": 2589 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.6174496644295307e-06, |
| "loss": 1.2881, |
| "step": 2590 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.6107382550335576e-06, |
| "loss": 1.2832, |
| "step": 2591 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.604026845637584e-06, |
| "loss": 1.2734, |
| "step": 2592 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.597315436241611e-06, |
| "loss": 1.2754, |
| "step": 2593 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.5906040268456377e-06, |
| "loss": 1.2363, |
| "step": 2594 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.583892617449665e-06, |
| "loss": 1.2734, |
| "step": 2595 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.577181208053692e-06, |
| "loss": 1.2471, |
| "step": 2596 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.5704697986577182e-06, |
| "loss": 1.249, |
| "step": 2597 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.563758389261745e-06, |
| "loss": 1.2646, |
| "step": 2598 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.557046979865772e-06, |
| "loss": 1.249, |
| "step": 2599 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.5503355704697992e-06, |
| "loss": 1.2607, |
| "step": 2600 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.5436241610738257e-06, |
| "loss": 1.2998, |
| "step": 2601 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.5369127516778525e-06, |
| "loss": 1.2559, |
| "step": 2602 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.5302013422818794e-06, |
| "loss": 1.2734, |
| "step": 2603 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.5234899328859062e-06, |
| "loss": 1.2793, |
| "step": 2604 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.5167785234899335e-06, |
| "loss": 1.2861, |
| "step": 2605 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.51006711409396e-06, |
| "loss": 1.2373, |
| "step": 2606 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.5033557046979868e-06, |
| "loss": 1.2695, |
| "step": 2607 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.4966442953020136e-06, |
| "loss": 1.2529, |
| "step": 2608 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.4899328859060405e-06, |
| "loss": 1.2354, |
| "step": 2609 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.4832214765100673e-06, |
| "loss": 1.2568, |
| "step": 2610 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.476510067114094e-06, |
| "loss": 1.2656, |
| "step": 2611 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.469798657718121e-06, |
| "loss": 1.3076, |
| "step": 2612 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.463087248322148e-06, |
| "loss": 1.252, |
| "step": 2613 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.4563758389261747e-06, |
| "loss": 1.2871, |
| "step": 2614 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.4496644295302016e-06, |
| "loss": 1.252, |
| "step": 2615 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.4429530201342285e-06, |
| "loss": 1.2441, |
| "step": 2616 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.4362416107382553e-06, |
| "loss": 1.2402, |
| "step": 2617 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.429530201342282e-06, |
| "loss": 1.2832, |
| "step": 2618 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.422818791946309e-06, |
| "loss": 1.2861, |
| "step": 2619 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.416107382550336e-06, |
| "loss": 1.29, |
| "step": 2620 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.4093959731543627e-06, |
| "loss": 1.2666, |
| "step": 2621 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.4026845637583896e-06, |
| "loss": 1.2764, |
| "step": 2622 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.3959731543624164e-06, |
| "loss": 1.249, |
| "step": 2623 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.3892617449664433e-06, |
| "loss": 1.25, |
| "step": 2624 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.3825503355704697e-06, |
| "loss": 1.2959, |
| "step": 2625 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.375838926174497e-06, |
| "loss": 1.3018, |
| "step": 2626 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.369127516778524e-06, |
| "loss": 1.2959, |
| "step": 2627 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.3624161073825507e-06, |
| "loss": 1.2363, |
| "step": 2628 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.3557046979865775e-06, |
| "loss": 1.2725, |
| "step": 2629 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.348993288590604e-06, |
| "loss": 1.2744, |
| "step": 2630 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.3422818791946313e-06, |
| "loss": 1.2852, |
| "step": 2631 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.3355704697986577e-06, |
| "loss": 1.3115, |
| "step": 2632 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.328859060402685e-06, |
| "loss": 1.2461, |
| "step": 2633 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.3221476510067114e-06, |
| "loss": 1.2979, |
| "step": 2634 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.3154362416107382e-06, |
| "loss": 1.2607, |
| "step": 2635 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.3087248322147655e-06, |
| "loss": 1.2832, |
| "step": 2636 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.302013422818792e-06, |
| "loss": 1.2627, |
| "step": 2637 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.2953020134228192e-06, |
| "loss": 1.2646, |
| "step": 2638 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.2885906040268457e-06, |
| "loss": 1.2842, |
| "step": 2639 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.2818791946308725e-06, |
| "loss": 1.2432, |
| "step": 2640 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.2751677852348994e-06, |
| "loss": 1.2988, |
| "step": 2641 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.268456375838926e-06, |
| "loss": 1.2812, |
| "step": 2642 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.2617449664429535e-06, |
| "loss": 1.2578, |
| "step": 2643 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.25503355704698e-06, |
| "loss": 1.2383, |
| "step": 2644 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.2483221476510068e-06, |
| "loss": 1.2559, |
| "step": 2645 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.2416107382550336e-06, |
| "loss": 1.2334, |
| "step": 2646 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.2348993288590605e-06, |
| "loss": 1.29, |
| "step": 2647 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.2281879194630873e-06, |
| "loss": 1.2734, |
| "step": 2648 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.221476510067114e-06, |
| "loss": 1.2549, |
| "step": 2649 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.2147651006711415e-06, |
| "loss": 1.2734, |
| "step": 2650 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.208053691275168e-06, |
| "loss": 1.2539, |
| "step": 2651 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.2013422818791947e-06, |
| "loss": 1.2832, |
| "step": 2652 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.1946308724832216e-06, |
| "loss": 1.2832, |
| "step": 2653 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.1879194630872484e-06, |
| "loss": 1.291, |
| "step": 2654 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.1812080536912753e-06, |
| "loss": 1.2861, |
| "step": 2655 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.174496644295302e-06, |
| "loss": 1.2812, |
| "step": 2656 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.167785234899329e-06, |
| "loss": 1.248, |
| "step": 2657 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.161073825503356e-06, |
| "loss": 1.3027, |
| "step": 2658 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.1543624161073827e-06, |
| "loss": 1.2803, |
| "step": 2659 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.1476510067114096e-06, |
| "loss": 1.2451, |
| "step": 2660 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.1409395973154364e-06, |
| "loss": 1.2686, |
| "step": 2661 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.1342281879194633e-06, |
| "loss": 1.2793, |
| "step": 2662 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.12751677852349e-06, |
| "loss": 1.2354, |
| "step": 2663 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.120805369127517e-06, |
| "loss": 1.2832, |
| "step": 2664 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.114093959731544e-06, |
| "loss": 1.2578, |
| "step": 2665 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.1073825503355707e-06, |
| "loss": 1.2764, |
| "step": 2666 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.1006711409395975e-06, |
| "loss": 1.2754, |
| "step": 2667 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.0939597315436244e-06, |
| "loss": 1.2666, |
| "step": 2668 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.0872483221476512e-06, |
| "loss": 1.2441, |
| "step": 2669 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.080536912751678e-06, |
| "loss": 1.2803, |
| "step": 2670 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.073825503355705e-06, |
| "loss": 1.251, |
| "step": 2671 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.067114093959732e-06, |
| "loss": 1.2383, |
| "step": 2672 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.0604026845637587e-06, |
| "loss": 1.2666, |
| "step": 2673 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0536912751677855e-06, |
| "loss": 1.3027, |
| "step": 2674 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0469798657718124e-06, |
| "loss": 1.2764, |
| "step": 2675 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0402684563758392e-06, |
| "loss": 1.2627, |
| "step": 2676 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.033557046979866e-06, |
| "loss": 1.2627, |
| "step": 2677 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0268456375838925e-06, |
| "loss": 1.2529, |
| "step": 2678 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.0201342281879198e-06, |
| "loss": 1.2373, |
| "step": 2679 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.013422818791946e-06, |
| "loss": 1.2578, |
| "step": 2680 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.0067114093959735e-06, |
| "loss": 1.2471, |
| "step": 2681 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 1.2695, |
| "step": 2682 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 1.9932885906040268e-06, |
| "loss": 1.2832, |
| "step": 2683 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 1.986577181208054e-06, |
| "loss": 1.2783, |
| "step": 2684 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.9798657718120805e-06, |
| "loss": 1.2734, |
| "step": 2685 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.9731543624161077e-06, |
| "loss": 1.2715, |
| "step": 2686 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.966442953020134e-06, |
| "loss": 1.2744, |
| "step": 2687 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.959731543624161e-06, |
| "loss": 1.2412, |
| "step": 2688 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.9530201342281883e-06, |
| "loss": 1.2734, |
| "step": 2689 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 1.9463087248322147e-06, |
| "loss": 1.2646, |
| "step": 2690 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.939597315436242e-06, |
| "loss": 1.2559, |
| "step": 2691 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.9328859060402684e-06, |
| "loss": 1.2607, |
| "step": 2692 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.9261744966442957e-06, |
| "loss": 1.2441, |
| "step": 2693 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.919463087248322e-06, |
| "loss": 1.2832, |
| "step": 2694 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.912751677852349e-06, |
| "loss": 1.2725, |
| "step": 2695 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 1.906040268456376e-06, |
| "loss": 1.2998, |
| "step": 2696 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.899328859060403e-06, |
| "loss": 1.2705, |
| "step": 2697 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.8926174496644298e-06, |
| "loss": 1.2676, |
| "step": 2698 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.8859060402684564e-06, |
| "loss": 1.2666, |
| "step": 2699 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.8791946308724835e-06, |
| "loss": 1.2715, |
| "step": 2700 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.8724832214765101e-06, |
| "loss": 1.2539, |
| "step": 2701 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 1.8657718120805372e-06, |
| "loss": 1.2832, |
| "step": 2702 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.8590604026845638e-06, |
| "loss": 1.2627, |
| "step": 2703 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.8523489932885907e-06, |
| "loss": 1.2793, |
| "step": 2704 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.8456375838926177e-06, |
| "loss": 1.2734, |
| "step": 2705 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.8389261744966444e-06, |
| "loss": 1.2549, |
| "step": 2706 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.8322147651006714e-06, |
| "loss": 1.2744, |
| "step": 2707 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 1.825503355704698e-06, |
| "loss": 1.2607, |
| "step": 2708 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.818791946308725e-06, |
| "loss": 1.2725, |
| "step": 2709 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.8120805369127518e-06, |
| "loss": 1.2725, |
| "step": 2710 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.8053691275167786e-06, |
| "loss": 1.2725, |
| "step": 2711 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.7986577181208057e-06, |
| "loss": 1.2549, |
| "step": 2712 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.7919463087248324e-06, |
| "loss": 1.2705, |
| "step": 2713 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 1.7852348993288592e-06, |
| "loss": 1.2754, |
| "step": 2714 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.778523489932886e-06, |
| "loss": 1.2949, |
| "step": 2715 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.771812080536913e-06, |
| "loss": 1.2861, |
| "step": 2716 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.7651006711409396e-06, |
| "loss": 1.2578, |
| "step": 2717 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.7583892617449666e-06, |
| "loss": 1.2559, |
| "step": 2718 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.7516778523489935e-06, |
| "loss": 1.2275, |
| "step": 2719 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 1.7449664429530203e-06, |
| "loss": 1.2412, |
| "step": 2720 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7382550335570472e-06, |
| "loss": 1.2598, |
| "step": 2721 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7315436241610738e-06, |
| "loss": 1.2773, |
| "step": 2722 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7248322147651009e-06, |
| "loss": 1.2363, |
| "step": 2723 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7181208053691275e-06, |
| "loss": 1.2998, |
| "step": 2724 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7114093959731546e-06, |
| "loss": 1.2549, |
| "step": 2725 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 1.7046979865771812e-06, |
| "loss": 1.2734, |
| "step": 2726 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.697986577181208e-06, |
| "loss": 1.2812, |
| "step": 2727 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.6912751677852351e-06, |
| "loss": 1.2881, |
| "step": 2728 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.6845637583892618e-06, |
| "loss": 1.2949, |
| "step": 2729 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.6778523489932889e-06, |
| "loss": 1.2715, |
| "step": 2730 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.6711409395973155e-06, |
| "loss": 1.2666, |
| "step": 2731 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 1.6644295302013426e-06, |
| "loss": 1.2822, |
| "step": 2732 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.6577181208053692e-06, |
| "loss": 1.2402, |
| "step": 2733 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.651006711409396e-06, |
| "loss": 1.3086, |
| "step": 2734 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.6442953020134231e-06, |
| "loss": 1.2139, |
| "step": 2735 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.6375838926174498e-06, |
| "loss": 1.292, |
| "step": 2736 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.6308724832214768e-06, |
| "loss": 1.29, |
| "step": 2737 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 1.6241610738255035e-06, |
| "loss": 1.2695, |
| "step": 2738 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.6174496644295303e-06, |
| "loss": 1.2686, |
| "step": 2739 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.6107382550335572e-06, |
| "loss": 1.2607, |
| "step": 2740 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.604026845637584e-06, |
| "loss": 1.2549, |
| "step": 2741 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.597315436241611e-06, |
| "loss": 1.29, |
| "step": 2742 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.5906040268456377e-06, |
| "loss": 1.2646, |
| "step": 2743 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 1.5838926174496646e-06, |
| "loss": 1.29, |
| "step": 2744 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.5771812080536914e-06, |
| "loss": 1.2686, |
| "step": 2745 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.5704697986577183e-06, |
| "loss": 1.2881, |
| "step": 2746 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.563758389261745e-06, |
| "loss": 1.2832, |
| "step": 2747 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.557046979865772e-06, |
| "loss": 1.2881, |
| "step": 2748 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.5503355704697986e-06, |
| "loss": 1.3057, |
| "step": 2749 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 1.5436241610738257e-06, |
| "loss": 1.2822, |
| "step": 2750 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.5369127516778526e-06, |
| "loss": 1.2598, |
| "step": 2751 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.5302013422818792e-06, |
| "loss": 1.248, |
| "step": 2752 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.5234899328859063e-06, |
| "loss": 1.2734, |
| "step": 2753 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.516778523489933e-06, |
| "loss": 1.252, |
| "step": 2754 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.51006711409396e-06, |
| "loss": 1.2715, |
| "step": 2755 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 1.5033557046979866e-06, |
| "loss": 1.2148, |
| "step": 2756 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4966442953020135e-06, |
| "loss": 1.2891, |
| "step": 2757 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4899328859060405e-06, |
| "loss": 1.2441, |
| "step": 2758 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4832214765100672e-06, |
| "loss": 1.2734, |
| "step": 2759 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4765100671140942e-06, |
| "loss": 1.2666, |
| "step": 2760 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4697986577181209e-06, |
| "loss": 1.2959, |
| "step": 2761 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 1.4630872483221477e-06, |
| "loss": 1.2783, |
| "step": 2762 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4563758389261746e-06, |
| "loss": 1.2705, |
| "step": 2763 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4496644295302014e-06, |
| "loss": 1.293, |
| "step": 2764 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4429530201342285e-06, |
| "loss": 1.2842, |
| "step": 2765 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4362416107382551e-06, |
| "loss": 1.3037, |
| "step": 2766 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.429530201342282e-06, |
| "loss": 1.2207, |
| "step": 2767 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 1.4228187919463088e-06, |
| "loss": 1.2666, |
| "step": 2768 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.4161073825503357e-06, |
| "loss": 1.2969, |
| "step": 2769 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.4093959731543623e-06, |
| "loss": 1.2881, |
| "step": 2770 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.4026845637583894e-06, |
| "loss": 1.2744, |
| "step": 2771 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.395973154362416e-06, |
| "loss": 1.2881, |
| "step": 2772 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.3892617449664431e-06, |
| "loss": 1.2979, |
| "step": 2773 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 1.38255033557047e-06, |
| "loss": 1.2715, |
| "step": 2774 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.3758389261744968e-06, |
| "loss": 1.3223, |
| "step": 2775 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.3691275167785237e-06, |
| "loss": 1.2617, |
| "step": 2776 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.3624161073825503e-06, |
| "loss": 1.2461, |
| "step": 2777 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.3557046979865774e-06, |
| "loss": 1.2471, |
| "step": 2778 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.348993288590604e-06, |
| "loss": 1.2891, |
| "step": 2779 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 1.342281879194631e-06, |
| "loss": 1.2783, |
| "step": 2780 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.335570469798658e-06, |
| "loss": 1.2891, |
| "step": 2781 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.3288590604026846e-06, |
| "loss": 1.2725, |
| "step": 2782 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.3221476510067116e-06, |
| "loss": 1.2686, |
| "step": 2783 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.3154362416107383e-06, |
| "loss": 1.2725, |
| "step": 2784 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.3087248322147653e-06, |
| "loss": 1.2803, |
| "step": 2785 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 1.302013422818792e-06, |
| "loss": 1.2764, |
| "step": 2786 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2953020134228188e-06, |
| "loss": 1.2305, |
| "step": 2787 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.288590604026846e-06, |
| "loss": 1.2539, |
| "step": 2788 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2818791946308726e-06, |
| "loss": 1.2559, |
| "step": 2789 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2751677852348996e-06, |
| "loss": 1.2588, |
| "step": 2790 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2684563758389263e-06, |
| "loss": 1.2803, |
| "step": 2791 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 1.2617449664429531e-06, |
| "loss": 1.2666, |
| "step": 2792 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.25503355704698e-06, |
| "loss": 1.3223, |
| "step": 2793 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2483221476510068e-06, |
| "loss": 1.25, |
| "step": 2794 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2416107382550337e-06, |
| "loss": 1.2656, |
| "step": 2795 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2348993288590605e-06, |
| "loss": 1.2764, |
| "step": 2796 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2281879194630874e-06, |
| "loss": 1.2461, |
| "step": 2797 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 1.2214765100671142e-06, |
| "loss": 1.2725, |
| "step": 2798 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.214765100671141e-06, |
| "loss": 1.2686, |
| "step": 2799 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.208053691275168e-06, |
| "loss": 1.2764, |
| "step": 2800 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.2013422818791948e-06, |
| "loss": 1.2539, |
| "step": 2801 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.1946308724832216e-06, |
| "loss": 1.3008, |
| "step": 2802 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.1879194630872485e-06, |
| "loss": 1.2354, |
| "step": 2803 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 1.1812080536912753e-06, |
| "loss": 1.2617, |
| "step": 2804 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.174496644295302e-06, |
| "loss": 1.2725, |
| "step": 2805 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1677852348993288e-06, |
| "loss": 1.2812, |
| "step": 2806 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1610738255033557e-06, |
| "loss": 1.3008, |
| "step": 2807 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1543624161073828e-06, |
| "loss": 1.293, |
| "step": 2808 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1476510067114096e-06, |
| "loss": 1.2871, |
| "step": 2809 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 1.1409395973154363e-06, |
| "loss": 1.2715, |
| "step": 2810 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.134228187919463e-06, |
| "loss": 1.2861, |
| "step": 2811 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.12751677852349e-06, |
| "loss": 1.2393, |
| "step": 2812 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.1208053691275168e-06, |
| "loss": 1.2617, |
| "step": 2813 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.1140939597315437e-06, |
| "loss": 1.25, |
| "step": 2814 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.1073825503355707e-06, |
| "loss": 1.2842, |
| "step": 2815 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 1.1006711409395974e-06, |
| "loss": 1.2666, |
| "step": 2816 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.0939597315436242e-06, |
| "loss": 1.2783, |
| "step": 2817 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.087248322147651e-06, |
| "loss": 1.2568, |
| "step": 2818 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.080536912751678e-06, |
| "loss": 1.2773, |
| "step": 2819 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.0738255033557048e-06, |
| "loss": 1.2549, |
| "step": 2820 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.0671140939597316e-06, |
| "loss": 1.3135, |
| "step": 2821 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 1.0604026845637585e-06, |
| "loss": 1.2793, |
| "step": 2822 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.0536912751677853e-06, |
| "loss": 1.2949, |
| "step": 2823 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.0469798657718122e-06, |
| "loss": 1.2617, |
| "step": 2824 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.040268456375839e-06, |
| "loss": 1.2539, |
| "step": 2825 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.033557046979866e-06, |
| "loss": 1.2637, |
| "step": 2826 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.0268456375838928e-06, |
| "loss": 1.2676, |
| "step": 2827 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 1.0201342281879196e-06, |
| "loss": 1.25, |
| "step": 2828 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.0134228187919462e-06, |
| "loss": 1.2559, |
| "step": 2829 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.006711409395973e-06, |
| "loss": 1.2705, |
| "step": 2830 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 1.2725, |
| "step": 2831 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 9.93288590604027e-07, |
| "loss": 1.2637, |
| "step": 2832 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 9.865771812080539e-07, |
| "loss": 1.2725, |
| "step": 2833 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.798657718120805e-07, |
| "loss": 1.25, |
| "step": 2834 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.731543624161074e-07, |
| "loss": 1.2695, |
| "step": 2835 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.664429530201342e-07, |
| "loss": 1.249, |
| "step": 2836 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.59731543624161e-07, |
| "loss": 1.2803, |
| "step": 2837 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.53020134228188e-07, |
| "loss": 1.2559, |
| "step": 2838 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 9.463087248322149e-07, |
| "loss": 1.2783, |
| "step": 2839 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.395973154362417e-07, |
| "loss": 1.25, |
| "step": 2840 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.328859060402686e-07, |
| "loss": 1.248, |
| "step": 2841 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.261744966442953e-07, |
| "loss": 1.2715, |
| "step": 2842 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.194630872483222e-07, |
| "loss": 1.2832, |
| "step": 2843 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.12751677852349e-07, |
| "loss": 1.2627, |
| "step": 2844 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 9.060402684563759e-07, |
| "loss": 1.2617, |
| "step": 2845 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.993288590604029e-07, |
| "loss": 1.25, |
| "step": 2846 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.926174496644296e-07, |
| "loss": 1.2949, |
| "step": 2847 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.859060402684565e-07, |
| "loss": 1.2627, |
| "step": 2848 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.791946308724833e-07, |
| "loss": 1.251, |
| "step": 2849 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.724832214765102e-07, |
| "loss": 1.2646, |
| "step": 2850 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 8.657718120805369e-07, |
| "loss": 1.2754, |
| "step": 2851 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.590604026845638e-07, |
| "loss": 1.2578, |
| "step": 2852 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.523489932885906e-07, |
| "loss": 1.2686, |
| "step": 2853 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.456375838926176e-07, |
| "loss": 1.2441, |
| "step": 2854 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.389261744966444e-07, |
| "loss": 1.2354, |
| "step": 2855 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.322147651006713e-07, |
| "loss": 1.2754, |
| "step": 2856 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 8.25503355704698e-07, |
| "loss": 1.2705, |
| "step": 2857 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.187919463087249e-07, |
| "loss": 1.2803, |
| "step": 2858 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.120805369127517e-07, |
| "loss": 1.2422, |
| "step": 2859 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 8.053691275167786e-07, |
| "loss": 1.2637, |
| "step": 2860 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.986577181208055e-07, |
| "loss": 1.2793, |
| "step": 2861 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.919463087248323e-07, |
| "loss": 1.2666, |
| "step": 2862 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 7.852348993288591e-07, |
| "loss": 1.2812, |
| "step": 2863 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.78523489932886e-07, |
| "loss": 1.2578, |
| "step": 2864 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.718120805369129e-07, |
| "loss": 1.2832, |
| "step": 2865 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.651006711409396e-07, |
| "loss": 1.293, |
| "step": 2866 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.583892617449665e-07, |
| "loss": 1.2549, |
| "step": 2867 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.516778523489933e-07, |
| "loss": 1.2734, |
| "step": 2868 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 7.449664429530203e-07, |
| "loss": 1.248, |
| "step": 2869 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.382550335570471e-07, |
| "loss": 1.2842, |
| "step": 2870 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.315436241610739e-07, |
| "loss": 1.2393, |
| "step": 2871 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.248322147651007e-07, |
| "loss": 1.2939, |
| "step": 2872 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.181208053691276e-07, |
| "loss": 1.2324, |
| "step": 2873 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.114093959731544e-07, |
| "loss": 1.2734, |
| "step": 2874 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 7.046979865771812e-07, |
| "loss": 1.2705, |
| "step": 2875 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.97986577181208e-07, |
| "loss": 1.2734, |
| "step": 2876 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.91275167785235e-07, |
| "loss": 1.2324, |
| "step": 2877 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.845637583892618e-07, |
| "loss": 1.2891, |
| "step": 2878 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.778523489932887e-07, |
| "loss": 1.2852, |
| "step": 2879 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.711409395973155e-07, |
| "loss": 1.2832, |
| "step": 2880 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 6.644295302013423e-07, |
| "loss": 1.2471, |
| "step": 2881 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.577181208053691e-07, |
| "loss": 1.2832, |
| "step": 2882 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.51006711409396e-07, |
| "loss": 1.2852, |
| "step": 2883 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.44295302013423e-07, |
| "loss": 1.2451, |
| "step": 2884 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.375838926174498e-07, |
| "loss": 1.2627, |
| "step": 2885 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.308724832214766e-07, |
| "loss": 1.2705, |
| "step": 2886 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 6.241610738255034e-07, |
| "loss": 1.2373, |
| "step": 2887 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 6.174496644295303e-07, |
| "loss": 1.2861, |
| "step": 2888 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 6.107382550335571e-07, |
| "loss": 1.2744, |
| "step": 2889 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 6.04026845637584e-07, |
| "loss": 1.2588, |
| "step": 2890 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 5.973154362416108e-07, |
| "loss": 1.2666, |
| "step": 2891 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 5.906040268456377e-07, |
| "loss": 1.2793, |
| "step": 2892 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 5.838926174496644e-07, |
| "loss": 1.2549, |
| "step": 2893 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.771812080536914e-07, |
| "loss": 1.2578, |
| "step": 2894 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.704697986577181e-07, |
| "loss": 1.2822, |
| "step": 2895 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.63758389261745e-07, |
| "loss": 1.293, |
| "step": 2896 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.570469798657718e-07, |
| "loss": 1.2471, |
| "step": 2897 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.503355704697987e-07, |
| "loss": 1.2715, |
| "step": 2898 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 5.436241610738255e-07, |
| "loss": 1.2627, |
| "step": 2899 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.369127516778524e-07, |
| "loss": 1.2725, |
| "step": 2900 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.302013422818792e-07, |
| "loss": 1.2607, |
| "step": 2901 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.234899328859061e-07, |
| "loss": 1.2568, |
| "step": 2902 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.16778523489933e-07, |
| "loss": 1.2871, |
| "step": 2903 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.100671140939598e-07, |
| "loss": 1.2725, |
| "step": 2904 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 5.033557046979866e-07, |
| "loss": 1.2529, |
| "step": 2905 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.966442953020135e-07, |
| "loss": 1.291, |
| "step": 2906 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.899328859060403e-07, |
| "loss": 1.2754, |
| "step": 2907 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.832214765100671e-07, |
| "loss": 1.2646, |
| "step": 2908 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.76510067114094e-07, |
| "loss": 1.2637, |
| "step": 2909 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.6979865771812087e-07, |
| "loss": 1.2656, |
| "step": 2910 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 4.6308724832214767e-07, |
| "loss": 1.2754, |
| "step": 2911 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.563758389261745e-07, |
| "loss": 1.252, |
| "step": 2912 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.4966442953020143e-07, |
| "loss": 1.2754, |
| "step": 2913 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.4295302013422823e-07, |
| "loss": 1.2949, |
| "step": 2914 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.362416107382551e-07, |
| "loss": 1.2949, |
| "step": 2915 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.295302013422819e-07, |
| "loss": 1.2939, |
| "step": 2916 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 4.228187919463088e-07, |
| "loss": 1.29, |
| "step": 2917 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 4.1610738255033564e-07, |
| "loss": 1.251, |
| "step": 2918 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 4.0939597315436244e-07, |
| "loss": 1.2891, |
| "step": 2919 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 4.026845637583893e-07, |
| "loss": 1.2861, |
| "step": 2920 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 3.9597315436241615e-07, |
| "loss": 1.2637, |
| "step": 2921 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 3.89261744966443e-07, |
| "loss": 1.2627, |
| "step": 2922 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 3.825503355704698e-07, |
| "loss": 1.2881, |
| "step": 2923 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.7583892617449665e-07, |
| "loss": 1.2646, |
| "step": 2924 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.6912751677852356e-07, |
| "loss": 1.2686, |
| "step": 2925 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.6241610738255036e-07, |
| "loss": 1.2842, |
| "step": 2926 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.557046979865772e-07, |
| "loss": 1.2764, |
| "step": 2927 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.48993288590604e-07, |
| "loss": 1.2861, |
| "step": 2928 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 3.422818791946309e-07, |
| "loss": 1.2148, |
| "step": 2929 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.3557046979865777e-07, |
| "loss": 1.2705, |
| "step": 2930 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.2885906040268457e-07, |
| "loss": 1.252, |
| "step": 2931 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.221476510067115e-07, |
| "loss": 1.2822, |
| "step": 2932 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.154362416107383e-07, |
| "loss": 1.2666, |
| "step": 2933 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.0872483221476513e-07, |
| "loss": 1.2461, |
| "step": 2934 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 3.02013422818792e-07, |
| "loss": 1.2832, |
| "step": 2935 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.9530201342281884e-07, |
| "loss": 1.2666, |
| "step": 2936 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.885906040268457e-07, |
| "loss": 1.2646, |
| "step": 2937 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.818791946308725e-07, |
| "loss": 1.292, |
| "step": 2938 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.7516778523489934e-07, |
| "loss": 1.2812, |
| "step": 2939 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.684563758389262e-07, |
| "loss": 1.251, |
| "step": 2940 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.6174496644295305e-07, |
| "loss": 1.2695, |
| "step": 2941 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.550335570469799e-07, |
| "loss": 1.2539, |
| "step": 2942 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.4832214765100675e-07, |
| "loss": 1.2598, |
| "step": 2943 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.4161073825503355e-07, |
| "loss": 1.2686, |
| "step": 2944 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.3489932885906043e-07, |
| "loss": 1.2588, |
| "step": 2945 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.2818791946308726e-07, |
| "loss": 1.2939, |
| "step": 2946 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.2147651006711411e-07, |
| "loss": 1.2891, |
| "step": 2947 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.1476510067114094e-07, |
| "loss": 1.2871, |
| "step": 2948 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.0805369127516782e-07, |
| "loss": 1.2559, |
| "step": 2949 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.0134228187919465e-07, |
| "loss": 1.2402, |
| "step": 2950 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.946308724832215e-07, |
| "loss": 1.2773, |
| "step": 2951 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.8791946308724833e-07, |
| "loss": 1.2842, |
| "step": 2952 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 1.8120805369127518e-07, |
| "loss": 1.2568, |
| "step": 2953 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.74496644295302e-07, |
| "loss": 1.2715, |
| "step": 2954 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.6778523489932889e-07, |
| "loss": 1.2676, |
| "step": 2955 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.6107382550335574e-07, |
| "loss": 1.3105, |
| "step": 2956 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.5436241610738257e-07, |
| "loss": 1.2842, |
| "step": 2957 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.4765100671140942e-07, |
| "loss": 1.2705, |
| "step": 2958 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 1.4093959731543624e-07, |
| "loss": 1.2852, |
| "step": 2959 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.342281879194631e-07, |
| "loss": 1.2861, |
| "step": 2960 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.2751677852348995e-07, |
| "loss": 1.2881, |
| "step": 2961 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.2080536912751678e-07, |
| "loss": 1.2617, |
| "step": 2962 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.1409395973154363e-07, |
| "loss": 1.2432, |
| "step": 2963 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.0738255033557047e-07, |
| "loss": 1.251, |
| "step": 2964 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 1.0067114093959732e-07, |
| "loss": 1.2861, |
| "step": 2965 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 9.395973154362416e-08, |
| "loss": 1.2432, |
| "step": 2966 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 8.7248322147651e-08, |
| "loss": 1.2744, |
| "step": 2967 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 8.053691275167787e-08, |
| "loss": 1.2607, |
| "step": 2968 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 7.382550335570471e-08, |
| "loss": 1.2617, |
| "step": 2969 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 6.711409395973155e-08, |
| "loss": 1.2568, |
| "step": 2970 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 6.040268456375839e-08, |
| "loss": 1.2998, |
| "step": 2971 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 5.3691275167785235e-08, |
| "loss": 1.2461, |
| "step": 2972 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 4.697986577181208e-08, |
| "loss": 1.2832, |
| "step": 2973 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 4.0268456375838935e-08, |
| "loss": 1.2891, |
| "step": 2974 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 3.3557046979865774e-08, |
| "loss": 1.2754, |
| "step": 2975 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.6845637583892618e-08, |
| "loss": 1.2607, |
| "step": 2976 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.0134228187919467e-08, |
| "loss": 1.2461, |
| "step": 2977 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 1.3422818791946309e-08, |
| "loss": 1.2412, |
| "step": 2978 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 6.711409395973154e-09, |
| "loss": 1.2656, |
| "step": 2979 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 0.0, |
| "loss": 1.2773, |
| "step": 2980 |
| }, |
| { |
| "epoch": 5.0, |
| "step": 2980, |
| "total_flos": 1.2443370415928115e+18, |
| "train_loss": 1.3799116505872484, |
| "train_runtime": 3508.8142, |
| "train_samples_per_second": 108.638, |
| "train_steps_per_second": 0.849 |
| } |
| ], |
| "max_steps": 2980, |
| "num_train_epochs": 5, |
| "total_flos": 1.2443370415928115e+18, |
| "trial_name": null, |
| "trial_params": null |
| } |