| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 1410, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.00425531914893617, |
| "grad_norm": 1.125, |
| "learning_rate": 8.450704225352114e-07, |
| "loss": 1.4721390008926392, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.00851063829787234, |
| "grad_norm": 1.046875, |
| "learning_rate": 2.535211267605634e-06, |
| "loss": 1.9258784055709839, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.01276595744680851, |
| "grad_norm": 5.78125, |
| "learning_rate": 4.225352112676057e-06, |
| "loss": 1.9619797468185425, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.01702127659574468, |
| "grad_norm": 2.5625, |
| "learning_rate": 5.915492957746478e-06, |
| "loss": 1.6346324682235718, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.02127659574468085, |
| "grad_norm": 1.21875, |
| "learning_rate": 7.6056338028169015e-06, |
| "loss": 1.9556920528411865, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.02553191489361702, |
| "grad_norm": 0.87109375, |
| "learning_rate": 9.295774647887323e-06, |
| "loss": 1.470306634902954, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.029787234042553193, |
| "grad_norm": 1.3984375, |
| "learning_rate": 1.0985915492957748e-05, |
| "loss": 2.068174362182617, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.03404255319148936, |
| "grad_norm": 1.03125, |
| "learning_rate": 1.267605633802817e-05, |
| "loss": 1.903701663017273, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.03829787234042553, |
| "grad_norm": 1.0703125, |
| "learning_rate": 1.436619718309859e-05, |
| "loss": 1.765465259552002, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.0425531914893617, |
| "grad_norm": 0.87890625, |
| "learning_rate": 1.6056338028169014e-05, |
| "loss": 1.9205501079559326, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.04680851063829787, |
| "grad_norm": 0.8359375, |
| "learning_rate": 1.7746478873239435e-05, |
| "loss": 1.6455962657928467, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.05106382978723404, |
| "grad_norm": 0.83984375, |
| "learning_rate": 1.943661971830986e-05, |
| "loss": 1.6872483491897583, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.05531914893617021, |
| "grad_norm": 2.234375, |
| "learning_rate": 2.112676056338028e-05, |
| "loss": 1.4278151988983154, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.059574468085106386, |
| "grad_norm": 0.76171875, |
| "learning_rate": 2.2816901408450703e-05, |
| "loss": 1.7643554210662842, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.06382978723404255, |
| "grad_norm": 0.71484375, |
| "learning_rate": 2.4507042253521128e-05, |
| "loss": 1.686973214149475, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.06808510638297872, |
| "grad_norm": 0.63671875, |
| "learning_rate": 2.619718309859155e-05, |
| "loss": 1.4633193016052246, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.07234042553191489, |
| "grad_norm": 0.60546875, |
| "learning_rate": 2.788732394366197e-05, |
| "loss": 1.6693249940872192, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.07659574468085106, |
| "grad_norm": 0.55859375, |
| "learning_rate": 2.9577464788732395e-05, |
| "loss": 1.657246708869934, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.08085106382978724, |
| "grad_norm": 1.0703125, |
| "learning_rate": 3.126760563380282e-05, |
| "loss": 1.882036805152893, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.0851063829787234, |
| "grad_norm": 0.765625, |
| "learning_rate": 3.2957746478873245e-05, |
| "loss": 1.618768572807312, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.08936170212765958, |
| "grad_norm": 1.0078125, |
| "learning_rate": 3.4647887323943666e-05, |
| "loss": 1.29584801197052, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.09361702127659574, |
| "grad_norm": 0.5546875, |
| "learning_rate": 3.633802816901408e-05, |
| "loss": 1.5222253799438477, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.09787234042553192, |
| "grad_norm": 1.2890625, |
| "learning_rate": 3.802816901408451e-05, |
| "loss": 1.3532958030700684, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.10212765957446808, |
| "grad_norm": 1.1640625, |
| "learning_rate": 3.971830985915493e-05, |
| "loss": 1.5933119058609009, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.10638297872340426, |
| "grad_norm": 1.671875, |
| "learning_rate": 4.140845070422535e-05, |
| "loss": 1.6478880643844604, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.11063829787234042, |
| "grad_norm": 0.578125, |
| "learning_rate": 4.309859154929578e-05, |
| "loss": 1.3426867723464966, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.1148936170212766, |
| "grad_norm": 0.5078125, |
| "learning_rate": 4.47887323943662e-05, |
| "loss": 1.4301996231079102, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.11914893617021277, |
| "grad_norm": 0.44140625, |
| "learning_rate": 4.6478873239436617e-05, |
| "loss": 1.2384978532791138, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.12340425531914893, |
| "grad_norm": 2.328125, |
| "learning_rate": 4.8169014084507045e-05, |
| "loss": 1.4298828840255737, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.1276595744680851, |
| "grad_norm": 0.71875, |
| "learning_rate": 4.9859154929577466e-05, |
| "loss": 1.3327827453613281, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.13191489361702127, |
| "grad_norm": 0.59375, |
| "learning_rate": 5.154929577464789e-05, |
| "loss": 1.281138300895691, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.13617021276595745, |
| "grad_norm": 1.0703125, |
| "learning_rate": 5.3239436619718316e-05, |
| "loss": 1.064106822013855, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.14042553191489363, |
| "grad_norm": 0.314453125, |
| "learning_rate": 5.492957746478874e-05, |
| "loss": 1.4334735870361328, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.14468085106382977, |
| "grad_norm": 0.78125, |
| "learning_rate": 5.661971830985915e-05, |
| "loss": 1.3736847639083862, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.14893617021276595, |
| "grad_norm": 1.2578125, |
| "learning_rate": 5.830985915492958e-05, |
| "loss": 1.3596861362457275, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.15319148936170213, |
| "grad_norm": 0.671875, |
| "learning_rate": 6e-05, |
| "loss": 1.4395864009857178, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.1574468085106383, |
| "grad_norm": 0.84765625, |
| "learning_rate": 5.9999735771881775e-05, |
| "loss": 1.5454157590866089, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.16170212765957448, |
| "grad_norm": 0.56640625, |
| "learning_rate": 5.99989430933451e-05, |
| "loss": 1.3853117227554321, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.16595744680851063, |
| "grad_norm": 0.408203125, |
| "learning_rate": 5.9997621981843994e-05, |
| "loss": 1.2754864692687988, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.1702127659574468, |
| "grad_norm": 0.92578125, |
| "learning_rate": 5.999577246646804e-05, |
| "loss": 1.5860013961791992, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.17446808510638298, |
| "grad_norm": 0.5859375, |
| "learning_rate": 5.99933945879417e-05, |
| "loss": 1.237565517425537, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.17872340425531916, |
| "grad_norm": 2.171875, |
| "learning_rate": 5.999048839862352e-05, |
| "loss": 1.2814902067184448, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.1829787234042553, |
| "grad_norm": 1.4296875, |
| "learning_rate": 5.998705396250493e-05, |
| "loss": 1.3300844430923462, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.18723404255319148, |
| "grad_norm": 0.462890625, |
| "learning_rate": 5.998309135520878e-05, |
| "loss": 1.3447200059890747, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.19148936170212766, |
| "grad_norm": 0.55078125, |
| "learning_rate": 5.997860066398778e-05, |
| "loss": 1.3371561765670776, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.19574468085106383, |
| "grad_norm": 0.6328125, |
| "learning_rate": 5.997358198772249e-05, |
| "loss": 1.3265268802642822, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.2, |
| "grad_norm": 0.51953125, |
| "learning_rate": 5.9968035436919206e-05, |
| "loss": 1.1992994546890259, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.20425531914893616, |
| "grad_norm": 0.5390625, |
| "learning_rate": 5.996196113370748e-05, |
| "loss": 1.2414522171020508, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.20851063829787234, |
| "grad_norm": 0.4921875, |
| "learning_rate": 5.9955359211837465e-05, |
| "loss": 1.314779281616211, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.2127659574468085, |
| "grad_norm": 0.6484375, |
| "learning_rate": 5.994822981667691e-05, |
| "loss": 1.2792837619781494, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.2170212765957447, |
| "grad_norm": 0.494140625, |
| "learning_rate": 5.994057310520807e-05, |
| "loss": 1.234731912612915, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.22127659574468084, |
| "grad_norm": 0.859375, |
| "learning_rate": 5.993238924602414e-05, |
| "loss": 1.1855965852737427, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.225531914893617, |
| "grad_norm": 0.427734375, |
| "learning_rate": 5.9923678419325616e-05, |
| "loss": 1.3185398578643799, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.2297872340425532, |
| "grad_norm": 0.8359375, |
| "learning_rate": 5.9914440816916236e-05, |
| "loss": 1.3415206670761108, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.23404255319148937, |
| "grad_norm": 1.2421875, |
| "learning_rate": 5.990467664219887e-05, |
| "loss": 1.7355936765670776, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.23829787234042554, |
| "grad_norm": 0.474609375, |
| "learning_rate": 5.989438611017101e-05, |
| "loss": 1.299019455909729, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.2425531914893617, |
| "grad_norm": 0.73046875, |
| "learning_rate": 5.9883569447419946e-05, |
| "loss": 1.2734484672546387, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.24680851063829787, |
| "grad_norm": 0.83203125, |
| "learning_rate": 5.98722268921179e-05, |
| "loss": 1.3722704648971558, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.251063829787234, |
| "grad_norm": 1.4453125, |
| "learning_rate": 5.9860358694016695e-05, |
| "loss": 1.241133451461792, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.2553191489361702, |
| "grad_norm": 0.451171875, |
| "learning_rate": 5.984796511444231e-05, |
| "loss": 1.2068897485733032, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.25957446808510637, |
| "grad_norm": 0.66796875, |
| "learning_rate": 5.983504642628911e-05, |
| "loss": 1.0534322261810303, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.26382978723404255, |
| "grad_norm": 0.5078125, |
| "learning_rate": 5.9821602914013794e-05, |
| "loss": 1.306460976600647, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.2680851063829787, |
| "grad_norm": 0.5625, |
| "learning_rate": 5.9807634873629205e-05, |
| "loss": 1.3372706174850464, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.2723404255319149, |
| "grad_norm": 1.546875, |
| "learning_rate": 5.979314261269775e-05, |
| "loss": 1.3372623920440674, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.2765957446808511, |
| "grad_norm": 3.953125, |
| "learning_rate": 5.9778126450324674e-05, |
| "loss": 1.303137183189392, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.28085106382978725, |
| "grad_norm": 0.546875, |
| "learning_rate": 5.9762586717151e-05, |
| "loss": 1.2485641241073608, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.2851063829787234, |
| "grad_norm": 0.45703125, |
| "learning_rate": 5.9746523755346284e-05, |
| "loss": 1.2784473896026611, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.28936170212765955, |
| "grad_norm": 0.9375, |
| "learning_rate": 5.972993791860101e-05, |
| "loss": 1.3001835346221924, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.2936170212765957, |
| "grad_norm": 1.2734375, |
| "learning_rate": 5.97128295721189e-05, |
| "loss": 1.2696309089660645, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.2978723404255319, |
| "grad_norm": 0.796875, |
| "learning_rate": 5.969519909260879e-05, |
| "loss": 1.4176753759384155, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.3021276595744681, |
| "grad_norm": 0.5546875, |
| "learning_rate": 5.967704686827641e-05, |
| "loss": 1.3172787427902222, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.30638297872340425, |
| "grad_norm": 14.0, |
| "learning_rate": 5.965837329881574e-05, |
| "loss": 1.4879260063171387, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.31063829787234043, |
| "grad_norm": 0.4453125, |
| "learning_rate": 5.9639178795400295e-05, |
| "loss": 1.1774471998214722, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.3148936170212766, |
| "grad_norm": 0.71875, |
| "learning_rate": 5.9619463780674034e-05, |
| "loss": 1.2439987659454346, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.3191489361702128, |
| "grad_norm": 0.48828125, |
| "learning_rate": 5.959922868874206e-05, |
| "loss": 1.205959439277649, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.32340425531914896, |
| "grad_norm": 1.1796875, |
| "learning_rate": 5.9578473965161075e-05, |
| "loss": 1.330673098564148, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.3276595744680851, |
| "grad_norm": 0.455078125, |
| "learning_rate": 5.9557200066929534e-05, |
| "loss": 1.2126266956329346, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.33191489361702126, |
| "grad_norm": 0.4140625, |
| "learning_rate": 5.95354074624776e-05, |
| "loss": 1.2708547115325928, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.33617021276595743, |
| "grad_norm": 0.6953125, |
| "learning_rate": 5.951309663165686e-05, |
| "loss": 1.2793720960617065, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.3404255319148936, |
| "grad_norm": 0.310546875, |
| "learning_rate": 5.9490268065729724e-05, |
| "loss": 1.2078324556350708, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.3446808510638298, |
| "grad_norm": 0.53515625, |
| "learning_rate": 5.946692226735858e-05, |
| "loss": 1.2282007932662964, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.34893617021276596, |
| "grad_norm": 0.74609375, |
| "learning_rate": 5.944305975059482e-05, |
| "loss": 1.2256958484649658, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.35319148936170214, |
| "grad_norm": 0.56640625, |
| "learning_rate": 5.9418681040867445e-05, |
| "loss": 1.2895530462265015, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.3574468085106383, |
| "grad_norm": 0.50390625, |
| "learning_rate": 5.939378667497147e-05, |
| "loss": 1.1319845914840698, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.3617021276595745, |
| "grad_norm": 0.828125, |
| "learning_rate": 5.9368377201056195e-05, |
| "loss": 1.3089065551757812, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.3659574468085106, |
| "grad_norm": 0.474609375, |
| "learning_rate": 5.934245317861308e-05, |
| "loss": 1.2677334547042847, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.3702127659574468, |
| "grad_norm": 0.45703125, |
| "learning_rate": 5.9316015178463446e-05, |
| "loss": 1.2161953449249268, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.37446808510638296, |
| "grad_norm": 0.890625, |
| "learning_rate": 5.928906378274585e-05, |
| "loss": 1.4091310501098633, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.37872340425531914, |
| "grad_norm": 0.8984375, |
| "learning_rate": 5.9261599584903375e-05, |
| "loss": 1.3989202976226807, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.3829787234042553, |
| "grad_norm": 0.55859375, |
| "learning_rate": 5.923362318967043e-05, |
| "loss": 1.1342294216156006, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.3872340425531915, |
| "grad_norm": 0.390625, |
| "learning_rate": 5.920513521305955e-05, |
| "loss": 1.2139039039611816, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.39148936170212767, |
| "grad_norm": 0.76953125, |
| "learning_rate": 5.9176136282347777e-05, |
| "loss": 1.12371027469635, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.39574468085106385, |
| "grad_norm": 0.84765625, |
| "learning_rate": 5.914662703606285e-05, |
| "loss": 1.3441473245620728, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.4, |
| "grad_norm": 0.9140625, |
| "learning_rate": 5.911660812396916e-05, |
| "loss": 1.2006664276123047, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.40425531914893614, |
| "grad_norm": 0.5, |
| "learning_rate": 5.90860802070534e-05, |
| "loss": 1.2136142253875732, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.4085106382978723, |
| "grad_norm": 0.59375, |
| "learning_rate": 5.905504395751011e-05, |
| "loss": 1.2749568223953247, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.4127659574468085, |
| "grad_norm": 0.40234375, |
| "learning_rate": 5.9023500058726754e-05, |
| "loss": 1.369842529296875, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.41702127659574467, |
| "grad_norm": 0.6171875, |
| "learning_rate": 5.899144920526876e-05, |
| "loss": 1.2179538011550903, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.42127659574468085, |
| "grad_norm": 0.765625, |
| "learning_rate": 5.895889210286416e-05, |
| "loss": 1.3043186664581299, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.425531914893617, |
| "grad_norm": 0.80859375, |
| "learning_rate": 5.8925829468388156e-05, |
| "loss": 1.309710144996643, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.4297872340425532, |
| "grad_norm": 0.36328125, |
| "learning_rate": 5.8892262029847186e-05, |
| "loss": 1.233137607574463, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.4340425531914894, |
| "grad_norm": 0.55859375, |
| "learning_rate": 5.885819052636303e-05, |
| "loss": 1.1934456825256348, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.43829787234042555, |
| "grad_norm": 0.625, |
| "learning_rate": 5.882361570815645e-05, |
| "loss": 1.2160348892211914, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.4425531914893617, |
| "grad_norm": 0.87890625, |
| "learning_rate": 5.8788538336530715e-05, |
| "loss": 1.2657216787338257, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.44680851063829785, |
| "grad_norm": 0.58203125, |
| "learning_rate": 5.8752959183854816e-05, |
| "loss": 1.2517626285552979, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.451063829787234, |
| "grad_norm": 0.66796875, |
| "learning_rate": 5.871687903354646e-05, |
| "loss": 1.1521042585372925, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.4553191489361702, |
| "grad_norm": 0.61328125, |
| "learning_rate": 5.8680298680054824e-05, |
| "loss": 1.2568317651748657, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.4595744680851064, |
| "grad_norm": 1.46875, |
| "learning_rate": 5.864321892884309e-05, |
| "loss": 1.2429876327514648, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.46382978723404256, |
| "grad_norm": 0.64453125, |
| "learning_rate": 5.860564059637066e-05, |
| "loss": 1.327577829360962, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.46808510638297873, |
| "grad_norm": 3.3125, |
| "learning_rate": 5.85675645100752e-05, |
| "loss": 1.416573166847229, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.4723404255319149, |
| "grad_norm": 0.828125, |
| "learning_rate": 5.852899150835445e-05, |
| "loss": 1.241747498512268, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.4765957446808511, |
| "grad_norm": 0.48046875, |
| "learning_rate": 5.848992244054768e-05, |
| "loss": 1.2418628931045532, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.4808510638297872, |
| "grad_norm": 0.58203125, |
| "learning_rate": 5.845035816691711e-05, |
| "loss": 1.1999211311340332, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.4851063829787234, |
| "grad_norm": 0.71875, |
| "learning_rate": 5.841029955862885e-05, |
| "loss": 1.4046475887298584, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.48936170212765956, |
| "grad_norm": 0.5234375, |
| "learning_rate": 5.836974749773382e-05, |
| "loss": 1.2456966638565063, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.49361702127659574, |
| "grad_norm": 0.578125, |
| "learning_rate": 5.83287028771482e-05, |
| "loss": 1.2392256259918213, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.4978723404255319, |
| "grad_norm": 0.6171875, |
| "learning_rate": 5.828716660063395e-05, |
| "loss": 1.3959895372390747, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.502127659574468, |
| "grad_norm": 0.482421875, |
| "learning_rate": 5.824513958277871e-05, |
| "loss": 1.2444478273391724, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.5063829787234042, |
| "grad_norm": 0.6328125, |
| "learning_rate": 5.8202622748975817e-05, |
| "loss": 1.294707179069519, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.5106382978723404, |
| "grad_norm": 1.5703125, |
| "learning_rate": 5.815961703540386e-05, |
| "loss": 1.3415395021438599, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.5148936170212766, |
| "grad_norm": 1.328125, |
| "learning_rate": 5.811612338900606e-05, |
| "loss": 1.1040756702423096, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.5191489361702127, |
| "grad_norm": 0.26953125, |
| "learning_rate": 5.807214276746943e-05, |
| "loss": 1.1671684980392456, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.5234042553191489, |
| "grad_norm": 0.474609375, |
| "learning_rate": 5.802767613920375e-05, |
| "loss": 1.1621744632720947, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.5276595744680851, |
| "grad_norm": 0.6171875, |
| "learning_rate": 5.7982724483320105e-05, |
| "loss": 1.2460886240005493, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.5319148936170213, |
| "grad_norm": 0.734375, |
| "learning_rate": 5.793728878960947e-05, |
| "loss": 1.3446158170700073, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.5361702127659574, |
| "grad_norm": 0.35546875, |
| "learning_rate": 5.789137005852084e-05, |
| "loss": 1.4579079151153564, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.5404255319148936, |
| "grad_norm": 0.74609375, |
| "learning_rate": 5.7844969301139194e-05, |
| "loss": 1.4334136247634888, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.5446808510638298, |
| "grad_norm": 1.421875, |
| "learning_rate": 5.779808753916325e-05, |
| "loss": 1.6397161483764648, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.548936170212766, |
| "grad_norm": 0.6953125, |
| "learning_rate": 5.7750725804883034e-05, |
| "loss": 1.301457166671753, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.5531914893617021, |
| "grad_norm": 0.7578125, |
| "learning_rate": 5.770288514115698e-05, |
| "loss": 1.2889872789382935, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.5574468085106383, |
| "grad_norm": 0.765625, |
| "learning_rate": 5.765456660138919e-05, |
| "loss": 1.2275917530059814, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.5617021276595745, |
| "grad_norm": 0.546875, |
| "learning_rate": 5.760577124950603e-05, |
| "loss": 1.2264875173568726, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.5659574468085107, |
| "grad_norm": 0.75, |
| "learning_rate": 5.7556500159932835e-05, |
| "loss": 1.3288713693618774, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.5702127659574469, |
| "grad_norm": 0.4609375, |
| "learning_rate": 5.750675441757024e-05, |
| "loss": 1.1868747472763062, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.574468085106383, |
| "grad_norm": 1.53125, |
| "learning_rate": 5.7456535117770204e-05, |
| "loss": 1.3627235889434814, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.5787234042553191, |
| "grad_norm": 0.73828125, |
| "learning_rate": 5.7405843366312e-05, |
| "loss": 1.2165353298187256, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.5829787234042553, |
| "grad_norm": 0.6484375, |
| "learning_rate": 5.735468027937782e-05, |
| "loss": 1.3962359428405762, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.5872340425531914, |
| "grad_norm": 0.49609375, |
| "learning_rate": 5.730304698352815e-05, |
| "loss": 1.2368425130844116, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.5914893617021276, |
| "grad_norm": 0.65625, |
| "learning_rate": 5.7250944615677036e-05, |
| "loss": 1.2761774063110352, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.5957446808510638, |
| "grad_norm": 0.6328125, |
| "learning_rate": 5.719837432306707e-05, |
| "loss": 1.146309494972229, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.6, |
| "grad_norm": 1.0859375, |
| "learning_rate": 5.7145337263244e-05, |
| "loss": 1.3055988550186157, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.6042553191489362, |
| "grad_norm": 0.6875, |
| "learning_rate": 5.7091834604031386e-05, |
| "loss": 1.2853789329528809, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.6085106382978723, |
| "grad_norm": 0.5078125, |
| "learning_rate": 5.703786752350482e-05, |
| "loss": 1.2626879215240479, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.6127659574468085, |
| "grad_norm": 0.8515625, |
| "learning_rate": 5.698343720996596e-05, |
| "loss": 1.0880780220031738, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.6170212765957447, |
| "grad_norm": 0.5859375, |
| "learning_rate": 5.6928544861916416e-05, |
| "loss": 1.4804942607879639, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.6212765957446809, |
| "grad_norm": 0.482421875, |
| "learning_rate": 5.687319168803136e-05, |
| "loss": 1.2268519401550293, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.625531914893617, |
| "grad_norm": 0.6015625, |
| "learning_rate": 5.681737890713286e-05, |
| "loss": 1.2773425579071045, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.6297872340425532, |
| "grad_norm": 0.40625, |
| "learning_rate": 5.676110774816309e-05, |
| "loss": 1.3065530061721802, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.6340425531914894, |
| "grad_norm": 0.50390625, |
| "learning_rate": 5.670437945015725e-05, |
| "loss": 1.2505621910095215, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.6382978723404256, |
| "grad_norm": 1.515625, |
| "learning_rate": 5.6647195262216296e-05, |
| "loss": 1.2247607707977295, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.6425531914893617, |
| "grad_norm": 0.296875, |
| "learning_rate": 5.658955644347944e-05, |
| "loss": 1.3000612258911133, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.6468085106382979, |
| "grad_norm": 0.50390625, |
| "learning_rate": 5.653146426309637e-05, |
| "loss": 1.2301831245422363, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.6510638297872341, |
| "grad_norm": 2.875, |
| "learning_rate": 5.64729200001994e-05, |
| "loss": 1.23581862449646, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.6553191489361702, |
| "grad_norm": 0.68359375, |
| "learning_rate": 5.6413924943875225e-05, |
| "loss": 1.2415351867675781, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.6595744680851063, |
| "grad_norm": 0.44921875, |
| "learning_rate": 5.635448039313658e-05, |
| "loss": 1.054430365562439, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.6638297872340425, |
| "grad_norm": 0.640625, |
| "learning_rate": 5.62945876568936e-05, |
| "loss": 1.1587709188461304, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.6680851063829787, |
| "grad_norm": 0.61328125, |
| "learning_rate": 5.623424805392504e-05, |
| "loss": 1.2375378608703613, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.6723404255319149, |
| "grad_norm": 0.44140625, |
| "learning_rate": 5.617346291284922e-05, |
| "loss": 1.2096360921859741, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.676595744680851, |
| "grad_norm": 3.484375, |
| "learning_rate": 5.611223357209474e-05, |
| "loss": 1.4315375089645386, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.6808510638297872, |
| "grad_norm": 0.515625, |
| "learning_rate": 5.6050561379871077e-05, |
| "loss": 1.2253354787826538, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.6851063829787234, |
| "grad_norm": 0.431640625, |
| "learning_rate": 5.5988447694138834e-05, |
| "loss": 1.1945621967315674, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.6893617021276596, |
| "grad_norm": 0.72265625, |
| "learning_rate": 5.5925893882579864e-05, |
| "loss": 1.2644530534744263, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.6936170212765957, |
| "grad_norm": 0.9921875, |
| "learning_rate": 5.5862901322567175e-05, |
| "loss": 1.2183250188827515, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.6978723404255319, |
| "grad_norm": 1.2421875, |
| "learning_rate": 5.5799471401134543e-05, |
| "loss": 1.4243056774139404, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.7021276595744681, |
| "grad_norm": 0.671875, |
| "learning_rate": 5.5735605514946046e-05, |
| "loss": 1.2356775999069214, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.7063829787234043, |
| "grad_norm": 2.078125, |
| "learning_rate": 5.567130507026527e-05, |
| "loss": 1.245442509651184, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.7106382978723405, |
| "grad_norm": 4.28125, |
| "learning_rate": 5.560657148292432e-05, |
| "loss": 1.0984729528427124, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.7148936170212766, |
| "grad_norm": 0.361328125, |
| "learning_rate": 5.554140617829271e-05, |
| "loss": 1.2282965183258057, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.7191489361702128, |
| "grad_norm": 0.875, |
| "learning_rate": 5.547581059124591e-05, |
| "loss": 1.3061587810516357, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.723404255319149, |
| "grad_norm": 0.85546875, |
| "learning_rate": 5.540978616613381e-05, |
| "loss": 1.2117385864257812, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.7276595744680852, |
| "grad_norm": 0.78515625, |
| "learning_rate": 5.534333435674889e-05, |
| "loss": 1.105977177619934, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.7319148936170212, |
| "grad_norm": 0.462890625, |
| "learning_rate": 5.527645662629417e-05, |
| "loss": 1.1953120231628418, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.7361702127659574, |
| "grad_norm": 0.59375, |
| "learning_rate": 5.520915444735106e-05, |
| "loss": 1.2646780014038086, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.7404255319148936, |
| "grad_norm": 0.390625, |
| "learning_rate": 5.514142930184689e-05, |
| "loss": 1.2484480142593384, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.7446808510638298, |
| "grad_norm": 1.8203125, |
| "learning_rate": 5.5073282681022354e-05, |
| "loss": 1.274402379989624, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.7489361702127659, |
| "grad_norm": 0.37890625, |
| "learning_rate": 5.5004716085398515e-05, |
| "loss": 1.182061791419983, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.7531914893617021, |
| "grad_norm": 1.015625, |
| "learning_rate": 5.493573102474395e-05, |
| "loss": 1.1438305377960205, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.7574468085106383, |
| "grad_norm": 0.80078125, |
| "learning_rate": 5.486632901804137e-05, |
| "loss": 1.2968688011169434, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.7617021276595745, |
| "grad_norm": 0.42578125, |
| "learning_rate": 5.4796511593454254e-05, |
| "loss": 1.1422514915466309, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.7659574468085106, |
| "grad_norm": 0.53515625, |
| "learning_rate": 5.4726280288293156e-05, |
| "loss": 1.20034658908844, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.7702127659574468, |
| "grad_norm": 0.453125, |
| "learning_rate": 5.4655636648981876e-05, |
| "loss": 1.224547266960144, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.774468085106383, |
| "grad_norm": 0.53515625, |
| "learning_rate": 5.458458223102342e-05, |
| "loss": 1.2625551223754883, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.7787234042553192, |
| "grad_norm": 0.60546875, |
| "learning_rate": 5.45131185989657e-05, |
| "loss": 1.170459508895874, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.7829787234042553, |
| "grad_norm": 0.439453125, |
| "learning_rate": 5.444124732636714e-05, |
| "loss": 1.1499803066253662, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.7872340425531915, |
| "grad_norm": 0.6640625, |
| "learning_rate": 5.4368969995762e-05, |
| "loss": 1.2377893924713135, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.7914893617021277, |
| "grad_norm": 0.83203125, |
| "learning_rate": 5.429628819862553e-05, |
| "loss": 1.1148861646652222, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.7957446808510639, |
| "grad_norm": 0.546875, |
| "learning_rate": 5.4223203535338945e-05, |
| "loss": 1.0856741666793823, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.8, |
| "grad_norm": 1.0390625, |
| "learning_rate": 5.414971761515417e-05, |
| "loss": 1.150919795036316, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.8042553191489362, |
| "grad_norm": 0.60546875, |
| "learning_rate": 5.4075832056158395e-05, |
| "loss": 1.21478271484375, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.8085106382978723, |
| "grad_norm": 0.66796875, |
| "learning_rate": 5.400154848523847e-05, |
| "loss": 1.2340768575668335, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.8127659574468085, |
| "grad_norm": 0.58984375, |
| "learning_rate": 5.392686853804508e-05, |
| "loss": 1.1131625175476074, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.8170212765957446, |
| "grad_norm": 1.7578125, |
| "learning_rate": 5.385179385895671e-05, |
| "loss": 1.203751564025879, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.8212765957446808, |
| "grad_norm": 1.5625, |
| "learning_rate": 5.377632610104347e-05, |
| "loss": 1.090701937675476, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.825531914893617, |
| "grad_norm": 0.61328125, |
| "learning_rate": 5.370046692603067e-05, |
| "loss": 1.1695449352264404, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.8297872340425532, |
| "grad_norm": 0.416015625, |
| "learning_rate": 5.362421800426221e-05, |
| "loss": 1.2987892627716064, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.8340425531914893, |
| "grad_norm": 0.5703125, |
| "learning_rate": 5.354758101466387e-05, |
| "loss": 1.145818829536438, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.8382978723404255, |
| "grad_norm": 0.76171875, |
| "learning_rate": 5.347055764470628e-05, |
| "loss": 1.0881308317184448, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.8425531914893617, |
| "grad_norm": 0.45703125, |
| "learning_rate": 5.3393149590367764e-05, |
| "loss": 1.312870740890503, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.8468085106382979, |
| "grad_norm": 1.3203125, |
| "learning_rate": 5.331535855609704e-05, |
| "loss": 1.239678144454956, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.851063829787234, |
| "grad_norm": 0.42578125, |
| "learning_rate": 5.323718625477566e-05, |
| "loss": 1.1566566228866577, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.8553191489361702, |
| "grad_norm": 3.984375, |
| "learning_rate": 5.315863440768031e-05, |
| "loss": 1.2142958641052246, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.8595744680851064, |
| "grad_norm": 0.7265625, |
| "learning_rate": 5.307970474444487e-05, |
| "loss": 1.1810637712478638, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.8638297872340426, |
| "grad_norm": 0.484375, |
| "learning_rate": 5.300039900302237e-05, |
| "loss": 1.216611623764038, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.8680851063829788, |
| "grad_norm": 0.43359375, |
| "learning_rate": 5.292071892964672e-05, |
| "loss": 1.2652095556259155, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.8723404255319149, |
| "grad_norm": 0.9921875, |
| "learning_rate": 5.2840666278794244e-05, |
| "loss": 1.1579747200012207, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.8765957446808511, |
| "grad_norm": 0.62109375, |
| "learning_rate": 5.276024281314504e-05, |
| "loss": 1.2436968088150024, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.8808510638297873, |
| "grad_norm": 1.03125, |
| "learning_rate": 5.267945030354419e-05, |
| "loss": 1.1159509420394897, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.8851063829787233, |
| "grad_norm": 0.421875, |
| "learning_rate": 5.259829052896277e-05, |
| "loss": 1.1389038562774658, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.8893617021276595, |
| "grad_norm": 0.59765625, |
| "learning_rate": 5.251676527645864e-05, |
| "loss": 1.179782509803772, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.8936170212765957, |
| "grad_norm": 0.83203125, |
| "learning_rate": 5.243487634113716e-05, |
| "loss": 1.2923263311386108, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.8978723404255319, |
| "grad_norm": 2.09375, |
| "learning_rate": 5.235262552611159e-05, |
| "loss": 1.0829358100891113, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.902127659574468, |
| "grad_norm": 0.859375, |
| "learning_rate": 5.227001464246343e-05, |
| "loss": 1.155044674873352, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.9063829787234042, |
| "grad_norm": 0.5234375, |
| "learning_rate": 5.218704550920257e-05, |
| "loss": 1.3217086791992188, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.9106382978723404, |
| "grad_norm": 0.63671875, |
| "learning_rate": 5.210371995322713e-05, |
| "loss": 1.2187201976776123, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.9148936170212766, |
| "grad_norm": 0.55078125, |
| "learning_rate": 5.202003980928338e-05, |
| "loss": 1.1969107389450073, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.9191489361702128, |
| "grad_norm": 0.76953125, |
| "learning_rate": 5.1936006919925216e-05, |
| "loss": 1.2421386241912842, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.9234042553191489, |
| "grad_norm": 0.609375, |
| "learning_rate": 5.185162313547368e-05, |
| "loss": 1.5777398347854614, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.9276595744680851, |
| "grad_norm": 0.59765625, |
| "learning_rate": 5.176689031397612e-05, |
| "loss": 1.2189395427703857, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.9319148936170213, |
| "grad_norm": 0.546875, |
| "learning_rate": 5.16818103211654e-05, |
| "loss": 1.191472053527832, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.9361702127659575, |
| "grad_norm": 0.515625, |
| "learning_rate": 5.1596385030418686e-05, |
| "loss": 1.0312530994415283, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.9404255319148936, |
| "grad_norm": 0.72265625, |
| "learning_rate": 5.151061632271633e-05, |
| "loss": 1.2013260126113892, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.9446808510638298, |
| "grad_norm": 0.6796875, |
| "learning_rate": 5.142450608660035e-05, |
| "loss": 1.1814100742340088, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.948936170212766, |
| "grad_norm": 0.796875, |
| "learning_rate": 5.133805621813285e-05, |
| "loss": 1.3198750019073486, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.9531914893617022, |
| "grad_norm": 0.421875, |
| "learning_rate": 5.1251268620854375e-05, |
| "loss": 1.1754573583602905, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.9574468085106383, |
| "grad_norm": 0.396484375, |
| "learning_rate": 5.116414520574186e-05, |
| "loss": 1.1766588687896729, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.9617021276595744, |
| "grad_norm": 0.41796875, |
| "learning_rate": 5.107668789116664e-05, |
| "loss": 1.1669294834136963, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.9659574468085106, |
| "grad_norm": 0.6171875, |
| "learning_rate": 5.0988898602852204e-05, |
| "loss": 1.0226119756698608, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.9702127659574468, |
| "grad_norm": 0.392578125, |
| "learning_rate": 5.090077927383173e-05, |
| "loss": 1.184398889541626, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.9744680851063829, |
| "grad_norm": 0.71875, |
| "learning_rate": 5.081233184440562e-05, |
| "loss": 1.2115238904953003, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.9787234042553191, |
| "grad_norm": 0.412109375, |
| "learning_rate": 5.072355826209869e-05, |
| "loss": 1.169049620628357, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.9829787234042553, |
| "grad_norm": 0.5703125, |
| "learning_rate": 5.063446048161731e-05, |
| "loss": 1.2979567050933838, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.9872340425531915, |
| "grad_norm": 0.71484375, |
| "learning_rate": 5.054504046480638e-05, |
| "loss": 1.046877145767212, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.9914893617021276, |
| "grad_norm": 2.078125, |
| "learning_rate": 5.0455300180606165e-05, |
| "loss": 1.2601969242095947, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.9957446808510638, |
| "grad_norm": 0.59375, |
| "learning_rate": 5.036524160500883e-05, |
| "loss": 1.1848105192184448, |
| "step": 468 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.486328125, |
| "learning_rate": 5.027486672101506e-05, |
| "loss": 1.2240030765533447, |
| "step": 470 |
| }, |
| { |
| "epoch": 1.004255319148936, |
| "grad_norm": 0.482421875, |
| "learning_rate": 5.0184177518590294e-05, |
| "loss": 0.9678438901901245, |
| "step": 472 |
| }, |
| { |
| "epoch": 1.0085106382978724, |
| "grad_norm": 1.21875, |
| "learning_rate": 5.0093175994621006e-05, |
| "loss": 0.8098158836364746, |
| "step": 474 |
| }, |
| { |
| "epoch": 1.0127659574468084, |
| "grad_norm": 0.5390625, |
| "learning_rate": 5.000186415287064e-05, |
| "loss": 1.0377544164657593, |
| "step": 476 |
| }, |
| { |
| "epoch": 1.0170212765957447, |
| "grad_norm": 0.578125, |
| "learning_rate": 4.9910244003935514e-05, |
| "loss": 0.9136829376220703, |
| "step": 478 |
| }, |
| { |
| "epoch": 1.0212765957446808, |
| "grad_norm": 0.96875, |
| "learning_rate": 4.981831756520061e-05, |
| "loss": 0.8520618081092834, |
| "step": 480 |
| }, |
| { |
| "epoch": 1.025531914893617, |
| "grad_norm": 0.875, |
| "learning_rate": 4.972608686079509e-05, |
| "loss": 0.7956000566482544, |
| "step": 482 |
| }, |
| { |
| "epoch": 1.0297872340425531, |
| "grad_norm": 0.3984375, |
| "learning_rate": 4.963355392154774e-05, |
| "loss": 0.9468518495559692, |
| "step": 484 |
| }, |
| { |
| "epoch": 1.0340425531914894, |
| "grad_norm": 0.671875, |
| "learning_rate": 4.954072078494226e-05, |
| "loss": 1.131507158279419, |
| "step": 486 |
| }, |
| { |
| "epoch": 1.0382978723404255, |
| "grad_norm": 0.37890625, |
| "learning_rate": 4.944758949507241e-05, |
| "loss": 1.0291956663131714, |
| "step": 488 |
| }, |
| { |
| "epoch": 1.0425531914893618, |
| "grad_norm": 0.65625, |
| "learning_rate": 4.9354162102596946e-05, |
| "loss": 1.1023054122924805, |
| "step": 490 |
| }, |
| { |
| "epoch": 1.0468085106382978, |
| "grad_norm": 0.51171875, |
| "learning_rate": 4.926044066469459e-05, |
| "loss": 0.9480533599853516, |
| "step": 492 |
| }, |
| { |
| "epoch": 1.0510638297872341, |
| "grad_norm": 0.58984375, |
| "learning_rate": 4.916642724501856e-05, |
| "loss": 0.898105263710022, |
| "step": 494 |
| }, |
| { |
| "epoch": 1.0553191489361702, |
| "grad_norm": 0.38671875, |
| "learning_rate": 4.9072123913651306e-05, |
| "loss": 0.9935942888259888, |
| "step": 496 |
| }, |
| { |
| "epoch": 1.0595744680851065, |
| "grad_norm": 0.703125, |
| "learning_rate": 4.89775327470588e-05, |
| "loss": 0.7786449193954468, |
| "step": 498 |
| }, |
| { |
| "epoch": 1.0638297872340425, |
| "grad_norm": 0.498046875, |
| "learning_rate": 4.888265582804487e-05, |
| "loss": 0.8598610758781433, |
| "step": 500 |
| }, |
| { |
| "epoch": 1.0680851063829788, |
| "grad_norm": 0.42578125, |
| "learning_rate": 4.878749524570533e-05, |
| "loss": 0.9269306063652039, |
| "step": 502 |
| }, |
| { |
| "epoch": 1.0723404255319149, |
| "grad_norm": 0.5390625, |
| "learning_rate": 4.869205309538197e-05, |
| "loss": 0.6500522494316101, |
| "step": 504 |
| }, |
| { |
| "epoch": 1.076595744680851, |
| "grad_norm": 0.765625, |
| "learning_rate": 4.8596331478616454e-05, |
| "loss": 0.9429140090942383, |
| "step": 506 |
| }, |
| { |
| "epoch": 1.0808510638297872, |
| "grad_norm": 0.48828125, |
| "learning_rate": 4.8500332503103996e-05, |
| "loss": 0.9708827137947083, |
| "step": 508 |
| }, |
| { |
| "epoch": 1.0851063829787233, |
| "grad_norm": 0.91796875, |
| "learning_rate": 4.8404058282646985e-05, |
| "loss": 0.9373714327812195, |
| "step": 510 |
| }, |
| { |
| "epoch": 1.0893617021276596, |
| "grad_norm": 1.671875, |
| "learning_rate": 4.830751093710844e-05, |
| "loss": 1.1464526653289795, |
| "step": 512 |
| }, |
| { |
| "epoch": 1.0936170212765957, |
| "grad_norm": 1.1875, |
| "learning_rate": 4.8210692592365296e-05, |
| "loss": 0.8451336026191711, |
| "step": 514 |
| }, |
| { |
| "epoch": 1.097872340425532, |
| "grad_norm": 0.8671875, |
| "learning_rate": 4.811360538026165e-05, |
| "loss": 1.058915376663208, |
| "step": 516 |
| }, |
| { |
| "epoch": 1.102127659574468, |
| "grad_norm": 1.1796875, |
| "learning_rate": 4.801625143856179e-05, |
| "loss": 1.225139856338501, |
| "step": 518 |
| }, |
| { |
| "epoch": 1.1063829787234043, |
| "grad_norm": 0.4609375, |
| "learning_rate": 4.79186329109031e-05, |
| "loss": 0.6482216119766235, |
| "step": 520 |
| }, |
| { |
| "epoch": 1.1106382978723404, |
| "grad_norm": 0.515625, |
| "learning_rate": 4.782075194674892e-05, |
| "loss": 0.802341103553772, |
| "step": 522 |
| }, |
| { |
| "epoch": 1.1148936170212767, |
| "grad_norm": 0.48828125, |
| "learning_rate": 4.772261070134113e-05, |
| "loss": 0.8717508316040039, |
| "step": 524 |
| }, |
| { |
| "epoch": 1.1191489361702127, |
| "grad_norm": 0.455078125, |
| "learning_rate": 4.762421133565284e-05, |
| "loss": 0.8982354402542114, |
| "step": 526 |
| }, |
| { |
| "epoch": 1.123404255319149, |
| "grad_norm": 0.83203125, |
| "learning_rate": 4.752555601634059e-05, |
| "loss": 1.0001122951507568, |
| "step": 528 |
| }, |
| { |
| "epoch": 1.127659574468085, |
| "grad_norm": 0.451171875, |
| "learning_rate": 4.742664691569685e-05, |
| "loss": 0.8902131915092468, |
| "step": 530 |
| }, |
| { |
| "epoch": 1.1319148936170214, |
| "grad_norm": 0.79296875, |
| "learning_rate": 4.732748621160212e-05, |
| "loss": 1.2693915367126465, |
| "step": 532 |
| }, |
| { |
| "epoch": 1.1361702127659574, |
| "grad_norm": 0.79296875, |
| "learning_rate": 4.7228076087476864e-05, |
| "loss": 0.8774423599243164, |
| "step": 534 |
| }, |
| { |
| "epoch": 1.1404255319148937, |
| "grad_norm": 0.671875, |
| "learning_rate": 4.71284187322336e-05, |
| "loss": 0.9756125211715698, |
| "step": 536 |
| }, |
| { |
| "epoch": 1.1446808510638298, |
| "grad_norm": 0.5546875, |
| "learning_rate": 4.702851634022864e-05, |
| "loss": 0.8841724991798401, |
| "step": 538 |
| }, |
| { |
| "epoch": 1.148936170212766, |
| "grad_norm": 0.275390625, |
| "learning_rate": 4.692837111121371e-05, |
| "loss": 0.794608473777771, |
| "step": 540 |
| }, |
| { |
| "epoch": 1.1531914893617021, |
| "grad_norm": 0.55859375, |
| "learning_rate": 4.6827985250287616e-05, |
| "loss": 0.9677450060844421, |
| "step": 542 |
| }, |
| { |
| "epoch": 1.1574468085106382, |
| "grad_norm": 0.51953125, |
| "learning_rate": 4.672736096784759e-05, |
| "loss": 0.9043182134628296, |
| "step": 544 |
| }, |
| { |
| "epoch": 1.1617021276595745, |
| "grad_norm": 0.34765625, |
| "learning_rate": 4.662650047954073e-05, |
| "loss": 0.9864247441291809, |
| "step": 546 |
| }, |
| { |
| "epoch": 1.1659574468085105, |
| "grad_norm": 0.58984375, |
| "learning_rate": 4.652540600621512e-05, |
| "loss": 0.7724499106407166, |
| "step": 548 |
| }, |
| { |
| "epoch": 1.1702127659574468, |
| "grad_norm": 0.357421875, |
| "learning_rate": 4.642407977387093e-05, |
| "loss": 0.8427982330322266, |
| "step": 550 |
| }, |
| { |
| "epoch": 1.174468085106383, |
| "grad_norm": 0.71484375, |
| "learning_rate": 4.632252401361149e-05, |
| "loss": 0.9444398283958435, |
| "step": 552 |
| }, |
| { |
| "epoch": 1.1787234042553192, |
| "grad_norm": 0.439453125, |
| "learning_rate": 4.622074096159409e-05, |
| "loss": 0.9146491289138794, |
| "step": 554 |
| }, |
| { |
| "epoch": 1.1829787234042553, |
| "grad_norm": 0.5390625, |
| "learning_rate": 4.6118732858980764e-05, |
| "loss": 0.9469373226165771, |
| "step": 556 |
| }, |
| { |
| "epoch": 1.1872340425531915, |
| "grad_norm": 0.3046875, |
| "learning_rate": 4.6016501951888916e-05, |
| "loss": 0.7791444659233093, |
| "step": 558 |
| }, |
| { |
| "epoch": 1.1914893617021276, |
| "grad_norm": 0.59765625, |
| "learning_rate": 4.591405049134189e-05, |
| "loss": 0.8431161046028137, |
| "step": 560 |
| }, |
| { |
| "epoch": 1.195744680851064, |
| "grad_norm": 0.609375, |
| "learning_rate": 4.5811380733219405e-05, |
| "loss": 0.9778469204902649, |
| "step": 562 |
| }, |
| { |
| "epoch": 1.2, |
| "grad_norm": 1.34375, |
| "learning_rate": 4.570849493820789e-05, |
| "loss": 1.0207704305648804, |
| "step": 564 |
| }, |
| { |
| "epoch": 1.2042553191489362, |
| "grad_norm": 0.4140625, |
| "learning_rate": 4.560539537175068e-05, |
| "loss": 0.9259340167045593, |
| "step": 566 |
| }, |
| { |
| "epoch": 1.2085106382978723, |
| "grad_norm": 0.71875, |
| "learning_rate": 4.550208430399813e-05, |
| "loss": 0.908126711845398, |
| "step": 568 |
| }, |
| { |
| "epoch": 1.2127659574468086, |
| "grad_norm": 0.4375, |
| "learning_rate": 4.539856400975767e-05, |
| "loss": 0.9130632877349854, |
| "step": 570 |
| }, |
| { |
| "epoch": 1.2170212765957447, |
| "grad_norm": 0.609375, |
| "learning_rate": 4.529483676844366e-05, |
| "loss": 0.9781907796859741, |
| "step": 572 |
| }, |
| { |
| "epoch": 1.2212765957446807, |
| "grad_norm": 0.51953125, |
| "learning_rate": 4.519090486402727e-05, |
| "loss": 1.0180959701538086, |
| "step": 574 |
| }, |
| { |
| "epoch": 1.225531914893617, |
| "grad_norm": 0.6328125, |
| "learning_rate": 4.5086770584986135e-05, |
| "loss": 0.8333351016044617, |
| "step": 576 |
| }, |
| { |
| "epoch": 1.2297872340425533, |
| "grad_norm": 0.396484375, |
| "learning_rate": 4.498243622425395e-05, |
| "loss": 0.7459168434143066, |
| "step": 578 |
| }, |
| { |
| "epoch": 1.2340425531914894, |
| "grad_norm": 0.39453125, |
| "learning_rate": 4.4877904079170046e-05, |
| "loss": 0.7476868033409119, |
| "step": 580 |
| }, |
| { |
| "epoch": 1.2382978723404254, |
| "grad_norm": 0.640625, |
| "learning_rate": 4.477317645142874e-05, |
| "loss": 0.7878999710083008, |
| "step": 582 |
| }, |
| { |
| "epoch": 1.2425531914893617, |
| "grad_norm": 0.7265625, |
| "learning_rate": 4.4668255647028706e-05, |
| "loss": 0.8205560445785522, |
| "step": 584 |
| }, |
| { |
| "epoch": 1.2468085106382978, |
| "grad_norm": 0.71875, |
| "learning_rate": 4.456314397622217e-05, |
| "loss": 0.9549564719200134, |
| "step": 586 |
| }, |
| { |
| "epoch": 1.251063829787234, |
| "grad_norm": 0.44921875, |
| "learning_rate": 4.445784375346406e-05, |
| "loss": 0.7480695247650146, |
| "step": 588 |
| }, |
| { |
| "epoch": 1.2553191489361701, |
| "grad_norm": 0.4765625, |
| "learning_rate": 4.435235729736101e-05, |
| "loss": 1.0009480714797974, |
| "step": 590 |
| }, |
| { |
| "epoch": 1.2595744680851064, |
| "grad_norm": 0.75390625, |
| "learning_rate": 4.4246686930620326e-05, |
| "loss": 0.7730486989021301, |
| "step": 592 |
| }, |
| { |
| "epoch": 1.2638297872340425, |
| "grad_norm": 0.62890625, |
| "learning_rate": 4.4140834979998853e-05, |
| "loss": 1.0018788576126099, |
| "step": 594 |
| }, |
| { |
| "epoch": 1.2680851063829788, |
| "grad_norm": 0.55078125, |
| "learning_rate": 4.403480377625176e-05, |
| "loss": 0.9256978631019592, |
| "step": 596 |
| }, |
| { |
| "epoch": 1.2723404255319148, |
| "grad_norm": 0.376953125, |
| "learning_rate": 4.392859565408114e-05, |
| "loss": 0.9084795713424683, |
| "step": 598 |
| }, |
| { |
| "epoch": 1.2765957446808511, |
| "grad_norm": 0.74609375, |
| "learning_rate": 4.38222129520847e-05, |
| "loss": 1.0896780490875244, |
| "step": 600 |
| }, |
| { |
| "epoch": 1.2808510638297872, |
| "grad_norm": 0.7890625, |
| "learning_rate": 4.3715658012704184e-05, |
| "loss": 0.992649495601654, |
| "step": 602 |
| }, |
| { |
| "epoch": 1.2851063829787235, |
| "grad_norm": 0.76171875, |
| "learning_rate": 4.360893318217386e-05, |
| "loss": 0.8589400053024292, |
| "step": 604 |
| }, |
| { |
| "epoch": 1.2893617021276595, |
| "grad_norm": 0.609375, |
| "learning_rate": 4.350204081046885e-05, |
| "loss": 1.02162766456604, |
| "step": 606 |
| }, |
| { |
| "epoch": 1.2936170212765958, |
| "grad_norm": 1.0234375, |
| "learning_rate": 4.3394983251253303e-05, |
| "loss": 0.83441162109375, |
| "step": 608 |
| }, |
| { |
| "epoch": 1.297872340425532, |
| "grad_norm": 0.69921875, |
| "learning_rate": 4.328776286182869e-05, |
| "loss": 1.0881268978118896, |
| "step": 610 |
| }, |
| { |
| "epoch": 1.302127659574468, |
| "grad_norm": 0.375, |
| "learning_rate": 4.3180382003081806e-05, |
| "loss": 0.9433637857437134, |
| "step": 612 |
| }, |
| { |
| "epoch": 1.3063829787234043, |
| "grad_norm": 0.55859375, |
| "learning_rate": 4.307284303943286e-05, |
| "loss": 1.0009987354278564, |
| "step": 614 |
| }, |
| { |
| "epoch": 1.3106382978723405, |
| "grad_norm": 0.70703125, |
| "learning_rate": 4.296514833878333e-05, |
| "loss": 0.9870180487632751, |
| "step": 616 |
| }, |
| { |
| "epoch": 1.3148936170212766, |
| "grad_norm": 1.0390625, |
| "learning_rate": 4.2857300272463896e-05, |
| "loss": 0.7985630631446838, |
| "step": 618 |
| }, |
| { |
| "epoch": 1.3191489361702127, |
| "grad_norm": 0.52734375, |
| "learning_rate": 4.274930121518221e-05, |
| "loss": 0.9055668115615845, |
| "step": 620 |
| }, |
| { |
| "epoch": 1.323404255319149, |
| "grad_norm": 0.49609375, |
| "learning_rate": 4.264115354497057e-05, |
| "loss": 0.9184504747390747, |
| "step": 622 |
| }, |
| { |
| "epoch": 1.327659574468085, |
| "grad_norm": 0.88671875, |
| "learning_rate": 4.253285964313358e-05, |
| "loss": 0.8388556241989136, |
| "step": 624 |
| }, |
| { |
| "epoch": 1.3319148936170213, |
| "grad_norm": 0.328125, |
| "learning_rate": 4.2424421894195746e-05, |
| "loss": 0.9362179040908813, |
| "step": 626 |
| }, |
| { |
| "epoch": 1.3361702127659574, |
| "grad_norm": 0.390625, |
| "learning_rate": 4.2315842685848914e-05, |
| "loss": 0.9783342480659485, |
| "step": 628 |
| }, |
| { |
| "epoch": 1.3404255319148937, |
| "grad_norm": 0.46875, |
| "learning_rate": 4.220712440889975e-05, |
| "loss": 1.012178659439087, |
| "step": 630 |
| }, |
| { |
| "epoch": 1.3446808510638297, |
| "grad_norm": 0.47265625, |
| "learning_rate": 4.2098269457217074e-05, |
| "loss": 0.9540913701057434, |
| "step": 632 |
| }, |
| { |
| "epoch": 1.348936170212766, |
| "grad_norm": 0.39453125, |
| "learning_rate": 4.1989280227679136e-05, |
| "loss": 0.9662917852401733, |
| "step": 634 |
| }, |
| { |
| "epoch": 1.353191489361702, |
| "grad_norm": 0.38671875, |
| "learning_rate": 4.188015912012085e-05, |
| "loss": 0.9110101461410522, |
| "step": 636 |
| }, |
| { |
| "epoch": 1.3574468085106384, |
| "grad_norm": 0.451171875, |
| "learning_rate": 4.177090853728096e-05, |
| "loss": 0.9384497404098511, |
| "step": 638 |
| }, |
| { |
| "epoch": 1.3617021276595744, |
| "grad_norm": 0.5234375, |
| "learning_rate": 4.1661530884749125e-05, |
| "loss": 0.7670997977256775, |
| "step": 640 |
| }, |
| { |
| "epoch": 1.3659574468085105, |
| "grad_norm": 0.50390625, |
| "learning_rate": 4.155202857091296e-05, |
| "loss": 0.8632846474647522, |
| "step": 642 |
| }, |
| { |
| "epoch": 1.3702127659574468, |
| "grad_norm": 0.474609375, |
| "learning_rate": 4.144240400690499e-05, |
| "loss": 0.6109669804573059, |
| "step": 644 |
| }, |
| { |
| "epoch": 1.374468085106383, |
| "grad_norm": 0.37890625, |
| "learning_rate": 4.133265960654956e-05, |
| "loss": 0.9433888792991638, |
| "step": 646 |
| }, |
| { |
| "epoch": 1.3787234042553191, |
| "grad_norm": 0.390625, |
| "learning_rate": 4.122279778630972e-05, |
| "loss": 0.9552747011184692, |
| "step": 648 |
| }, |
| { |
| "epoch": 1.3829787234042552, |
| "grad_norm": 0.546875, |
| "learning_rate": 4.1112820965233954e-05, |
| "loss": 1.0235886573791504, |
| "step": 650 |
| }, |
| { |
| "epoch": 1.3872340425531915, |
| "grad_norm": 0.54296875, |
| "learning_rate": 4.100273156490298e-05, |
| "loss": 0.9218517541885376, |
| "step": 652 |
| }, |
| { |
| "epoch": 1.3914893617021278, |
| "grad_norm": 0.4453125, |
| "learning_rate": 4.089253200937639e-05, |
| "loss": 0.9519026875495911, |
| "step": 654 |
| }, |
| { |
| "epoch": 1.3957446808510638, |
| "grad_norm": 0.5625, |
| "learning_rate": 4.078222472513928e-05, |
| "loss": 1.0031726360321045, |
| "step": 656 |
| }, |
| { |
| "epoch": 1.4, |
| "grad_norm": 0.3515625, |
| "learning_rate": 4.067181214104883e-05, |
| "loss": 0.9014978408813477, |
| "step": 658 |
| }, |
| { |
| "epoch": 1.4042553191489362, |
| "grad_norm": 0.78125, |
| "learning_rate": 4.05612966882808e-05, |
| "loss": 0.8434354066848755, |
| "step": 660 |
| }, |
| { |
| "epoch": 1.4085106382978723, |
| "grad_norm": 0.44140625, |
| "learning_rate": 4.0450680800276065e-05, |
| "loss": 0.9269939064979553, |
| "step": 662 |
| }, |
| { |
| "epoch": 1.4127659574468086, |
| "grad_norm": 0.62109375, |
| "learning_rate": 4.033996691268693e-05, |
| "loss": 1.1910121440887451, |
| "step": 664 |
| }, |
| { |
| "epoch": 1.4170212765957446, |
| "grad_norm": 0.326171875, |
| "learning_rate": 4.022915746332358e-05, |
| "loss": 0.9383742213249207, |
| "step": 666 |
| }, |
| { |
| "epoch": 1.421276595744681, |
| "grad_norm": 0.333984375, |
| "learning_rate": 4.011825489210038e-05, |
| "loss": 0.9260106086730957, |
| "step": 668 |
| }, |
| { |
| "epoch": 1.425531914893617, |
| "grad_norm": 0.5, |
| "learning_rate": 4.000726164098213e-05, |
| "loss": 0.8756862282752991, |
| "step": 670 |
| }, |
| { |
| "epoch": 1.4297872340425533, |
| "grad_norm": 0.39453125, |
| "learning_rate": 3.9896180153930326e-05, |
| "loss": 0.9409113526344299, |
| "step": 672 |
| }, |
| { |
| "epoch": 1.4340425531914893, |
| "grad_norm": 0.376953125, |
| "learning_rate": 3.978501287684933e-05, |
| "loss": 0.9107382893562317, |
| "step": 674 |
| }, |
| { |
| "epoch": 1.4382978723404256, |
| "grad_norm": 0.48046875, |
| "learning_rate": 3.9673762257532496e-05, |
| "loss": 0.9422937035560608, |
| "step": 676 |
| }, |
| { |
| "epoch": 1.4425531914893617, |
| "grad_norm": 0.400390625, |
| "learning_rate": 3.9562430745608315e-05, |
| "loss": 0.9622249007225037, |
| "step": 678 |
| }, |
| { |
| "epoch": 1.4468085106382977, |
| "grad_norm": 0.416015625, |
| "learning_rate": 3.945102079248645e-05, |
| "loss": 0.9257479906082153, |
| "step": 680 |
| }, |
| { |
| "epoch": 1.451063829787234, |
| "grad_norm": 0.361328125, |
| "learning_rate": 3.9339534851303746e-05, |
| "loss": 0.8421862125396729, |
| "step": 682 |
| }, |
| { |
| "epoch": 1.4553191489361703, |
| "grad_norm": 0.53125, |
| "learning_rate": 3.9227975376870264e-05, |
| "loss": 0.8206483125686646, |
| "step": 684 |
| }, |
| { |
| "epoch": 1.4595744680851064, |
| "grad_norm": 0.96484375, |
| "learning_rate": 3.911634482561514e-05, |
| "loss": 0.8526564836502075, |
| "step": 686 |
| }, |
| { |
| "epoch": 1.4638297872340424, |
| "grad_norm": 0.5625, |
| "learning_rate": 3.900464565553259e-05, |
| "loss": 1.036257266998291, |
| "step": 688 |
| }, |
| { |
| "epoch": 1.4680851063829787, |
| "grad_norm": 0.60546875, |
| "learning_rate": 3.889288032612775e-05, |
| "loss": 0.6551811695098877, |
| "step": 690 |
| }, |
| { |
| "epoch": 1.472340425531915, |
| "grad_norm": 0.376953125, |
| "learning_rate": 3.878105129836252e-05, |
| "loss": 0.9458101391792297, |
| "step": 692 |
| }, |
| { |
| "epoch": 1.476595744680851, |
| "grad_norm": 0.345703125, |
| "learning_rate": 3.8669161034601336e-05, |
| "loss": 0.8864946961402893, |
| "step": 694 |
| }, |
| { |
| "epoch": 1.4808510638297872, |
| "grad_norm": 0.435546875, |
| "learning_rate": 3.8557211998557025e-05, |
| "loss": 0.9007453322410583, |
| "step": 696 |
| }, |
| { |
| "epoch": 1.4851063829787234, |
| "grad_norm": 0.373046875, |
| "learning_rate": 3.844520665523654e-05, |
| "loss": 0.7618290185928345, |
| "step": 698 |
| }, |
| { |
| "epoch": 1.4893617021276595, |
| "grad_norm": 0.625, |
| "learning_rate": 3.8333147470886604e-05, |
| "loss": 0.7166705131530762, |
| "step": 700 |
| }, |
| { |
| "epoch": 1.4936170212765958, |
| "grad_norm": 0.63671875, |
| "learning_rate": 3.822103691293953e-05, |
| "loss": 0.9165350794792175, |
| "step": 702 |
| }, |
| { |
| "epoch": 1.4978723404255319, |
| "grad_norm": 0.3515625, |
| "learning_rate": 3.810887744995878e-05, |
| "loss": 0.6734737157821655, |
| "step": 704 |
| }, |
| { |
| "epoch": 1.5021276595744681, |
| "grad_norm": 0.76171875, |
| "learning_rate": 3.7996671551584686e-05, |
| "loss": 1.038259506225586, |
| "step": 706 |
| }, |
| { |
| "epoch": 1.5063829787234042, |
| "grad_norm": 0.58203125, |
| "learning_rate": 3.788442168848002e-05, |
| "loss": 1.081926941871643, |
| "step": 708 |
| }, |
| { |
| "epoch": 1.5106382978723403, |
| "grad_norm": 0.375, |
| "learning_rate": 3.777213033227562e-05, |
| "loss": 0.9264968633651733, |
| "step": 710 |
| }, |
| { |
| "epoch": 1.5148936170212766, |
| "grad_norm": 0.48828125, |
| "learning_rate": 3.765979995551599e-05, |
| "loss": 0.9294235110282898, |
| "step": 712 |
| }, |
| { |
| "epoch": 1.5191489361702128, |
| "grad_norm": 0.490234375, |
| "learning_rate": 3.7547433031604774e-05, |
| "loss": 0.9344536662101746, |
| "step": 714 |
| }, |
| { |
| "epoch": 1.523404255319149, |
| "grad_norm": 1.0390625, |
| "learning_rate": 3.7435032034750385e-05, |
| "loss": 0.9146227240562439, |
| "step": 716 |
| }, |
| { |
| "epoch": 1.527659574468085, |
| "grad_norm": 0.412109375, |
| "learning_rate": 3.73225994399115e-05, |
| "loss": 0.9178793430328369, |
| "step": 718 |
| }, |
| { |
| "epoch": 1.5319148936170213, |
| "grad_norm": 0.337890625, |
| "learning_rate": 3.721013772274251e-05, |
| "loss": 0.8365082144737244, |
| "step": 720 |
| }, |
| { |
| "epoch": 1.5361702127659576, |
| "grad_norm": 0.66796875, |
| "learning_rate": 3.7097649359539075e-05, |
| "loss": 0.8762301802635193, |
| "step": 722 |
| }, |
| { |
| "epoch": 1.5404255319148936, |
| "grad_norm": 0.37890625, |
| "learning_rate": 3.6985136827183575e-05, |
| "loss": 0.8902574181556702, |
| "step": 724 |
| }, |
| { |
| "epoch": 1.5446808510638297, |
| "grad_norm": 0.5, |
| "learning_rate": 3.6872602603090564e-05, |
| "loss": 0.9932896494865417, |
| "step": 726 |
| }, |
| { |
| "epoch": 1.548936170212766, |
| "grad_norm": 0.443359375, |
| "learning_rate": 3.6760049165152256e-05, |
| "loss": 0.896535336971283, |
| "step": 728 |
| }, |
| { |
| "epoch": 1.5531914893617023, |
| "grad_norm": 0.388671875, |
| "learning_rate": 3.6647478991683885e-05, |
| "loss": 1.0637593269348145, |
| "step": 730 |
| }, |
| { |
| "epoch": 1.5574468085106383, |
| "grad_norm": 0.361328125, |
| "learning_rate": 3.6534894561369214e-05, |
| "loss": 0.7936927676200867, |
| "step": 732 |
| }, |
| { |
| "epoch": 1.5617021276595744, |
| "grad_norm": 0.578125, |
| "learning_rate": 3.642229835320593e-05, |
| "loss": 0.8339595794677734, |
| "step": 734 |
| }, |
| { |
| "epoch": 1.5659574468085107, |
| "grad_norm": 0.38671875, |
| "learning_rate": 3.630969284645103e-05, |
| "loss": 0.9330006837844849, |
| "step": 736 |
| }, |
| { |
| "epoch": 1.570212765957447, |
| "grad_norm": 0.42578125, |
| "learning_rate": 3.6197080520566315e-05, |
| "loss": 0.8958064913749695, |
| "step": 738 |
| }, |
| { |
| "epoch": 1.574468085106383, |
| "grad_norm": 0.333984375, |
| "learning_rate": 3.6084463855163666e-05, |
| "loss": 0.8008121848106384, |
| "step": 740 |
| }, |
| { |
| "epoch": 1.578723404255319, |
| "grad_norm": 0.373046875, |
| "learning_rate": 3.597184532995055e-05, |
| "loss": 0.950303316116333, |
| "step": 742 |
| }, |
| { |
| "epoch": 1.5829787234042554, |
| "grad_norm": 0.52734375, |
| "learning_rate": 3.58592274246754e-05, |
| "loss": 0.9128738641738892, |
| "step": 744 |
| }, |
| { |
| "epoch": 1.5872340425531914, |
| "grad_norm": 0.40234375, |
| "learning_rate": 3.574661261907297e-05, |
| "loss": 0.8636385798454285, |
| "step": 746 |
| }, |
| { |
| "epoch": 1.5914893617021275, |
| "grad_norm": 0.5625, |
| "learning_rate": 3.563400339280979e-05, |
| "loss": 0.8232119083404541, |
| "step": 748 |
| }, |
| { |
| "epoch": 1.5957446808510638, |
| "grad_norm": 0.3984375, |
| "learning_rate": 3.5521402225429485e-05, |
| "loss": 0.8971787095069885, |
| "step": 750 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 1.28125, |
| "learning_rate": 3.540881159629831e-05, |
| "loss": 0.9066179394721985, |
| "step": 752 |
| }, |
| { |
| "epoch": 1.6042553191489362, |
| "grad_norm": 0.32421875, |
| "learning_rate": 3.529623398455042e-05, |
| "loss": 0.8429763913154602, |
| "step": 754 |
| }, |
| { |
| "epoch": 1.6085106382978722, |
| "grad_norm": 1.25, |
| "learning_rate": 3.5183671869033355e-05, |
| "loss": 0.8769048452377319, |
| "step": 756 |
| }, |
| { |
| "epoch": 1.6127659574468085, |
| "grad_norm": 0.400390625, |
| "learning_rate": 3.507112772825345e-05, |
| "loss": 1.064902424812317, |
| "step": 758 |
| }, |
| { |
| "epoch": 1.6170212765957448, |
| "grad_norm": 0.345703125, |
| "learning_rate": 3.4958604040321254e-05, |
| "loss": 1.0025050640106201, |
| "step": 760 |
| }, |
| { |
| "epoch": 1.6212765957446809, |
| "grad_norm": 0.3671875, |
| "learning_rate": 3.4846103282896956e-05, |
| "loss": 0.9082741737365723, |
| "step": 762 |
| }, |
| { |
| "epoch": 1.625531914893617, |
| "grad_norm": 0.482421875, |
| "learning_rate": 3.473362793313583e-05, |
| "loss": 1.00468909740448, |
| "step": 764 |
| }, |
| { |
| "epoch": 1.6297872340425532, |
| "grad_norm": 0.62890625, |
| "learning_rate": 3.4621180467633736e-05, |
| "loss": 1.1201874017715454, |
| "step": 766 |
| }, |
| { |
| "epoch": 1.6340425531914895, |
| "grad_norm": 0.73828125, |
| "learning_rate": 3.4508763362372495e-05, |
| "loss": 0.8420827388763428, |
| "step": 768 |
| }, |
| { |
| "epoch": 1.6382978723404256, |
| "grad_norm": 0.4453125, |
| "learning_rate": 3.4396379092665476e-05, |
| "loss": 0.9371001720428467, |
| "step": 770 |
| }, |
| { |
| "epoch": 1.6425531914893616, |
| "grad_norm": 0.375, |
| "learning_rate": 3.428403013310303e-05, |
| "loss": 1.0956753492355347, |
| "step": 772 |
| }, |
| { |
| "epoch": 1.646808510638298, |
| "grad_norm": 0.94921875, |
| "learning_rate": 3.4171718957497976e-05, |
| "loss": 1.0000417232513428, |
| "step": 774 |
| }, |
| { |
| "epoch": 1.6510638297872342, |
| "grad_norm": 0.51953125, |
| "learning_rate": 3.405944803883121e-05, |
| "loss": 0.7758415937423706, |
| "step": 776 |
| }, |
| { |
| "epoch": 1.65531914893617, |
| "grad_norm": 0.353515625, |
| "learning_rate": 3.394721984919721e-05, |
| "loss": 0.9292746186256409, |
| "step": 778 |
| }, |
| { |
| "epoch": 1.6595744680851063, |
| "grad_norm": 0.42578125, |
| "learning_rate": 3.383503685974956e-05, |
| "loss": 0.8925681114196777, |
| "step": 780 |
| }, |
| { |
| "epoch": 1.6638297872340426, |
| "grad_norm": 0.3203125, |
| "learning_rate": 3.3722901540646634e-05, |
| "loss": 0.9213247299194336, |
| "step": 782 |
| }, |
| { |
| "epoch": 1.6680851063829787, |
| "grad_norm": 0.57421875, |
| "learning_rate": 3.361081636099712e-05, |
| "loss": 1.0045268535614014, |
| "step": 784 |
| }, |
| { |
| "epoch": 1.6723404255319148, |
| "grad_norm": 1.4453125, |
| "learning_rate": 3.34987837888057e-05, |
| "loss": 1.0464414358139038, |
| "step": 786 |
| }, |
| { |
| "epoch": 1.676595744680851, |
| "grad_norm": 0.9375, |
| "learning_rate": 3.338680629091867e-05, |
| "loss": 0.7823745608329773, |
| "step": 788 |
| }, |
| { |
| "epoch": 1.6808510638297873, |
| "grad_norm": 0.5703125, |
| "learning_rate": 3.327488633296967e-05, |
| "loss": 1.0422827005386353, |
| "step": 790 |
| }, |
| { |
| "epoch": 1.6851063829787234, |
| "grad_norm": 0.62890625, |
| "learning_rate": 3.316302637932537e-05, |
| "loss": 0.8115458488464355, |
| "step": 792 |
| }, |
| { |
| "epoch": 1.6893617021276595, |
| "grad_norm": 0.38671875, |
| "learning_rate": 3.305122889303116e-05, |
| "loss": 1.0222609043121338, |
| "step": 794 |
| }, |
| { |
| "epoch": 1.6936170212765957, |
| "grad_norm": 0.54296875, |
| "learning_rate": 3.2939496335757004e-05, |
| "loss": 0.9007856249809265, |
| "step": 796 |
| }, |
| { |
| "epoch": 1.697872340425532, |
| "grad_norm": 0.498046875, |
| "learning_rate": 3.28278311677432e-05, |
| "loss": 0.9559044241905212, |
| "step": 798 |
| }, |
| { |
| "epoch": 1.702127659574468, |
| "grad_norm": 0.41015625, |
| "learning_rate": 3.271623584774616e-05, |
| "loss": 0.9334125518798828, |
| "step": 800 |
| }, |
| { |
| "epoch": 1.7063829787234042, |
| "grad_norm": 0.49609375, |
| "learning_rate": 3.260471283298434e-05, |
| "loss": 0.9849218130111694, |
| "step": 802 |
| }, |
| { |
| "epoch": 1.7106382978723405, |
| "grad_norm": 1.0078125, |
| "learning_rate": 3.249326457908411e-05, |
| "loss": 0.8784253001213074, |
| "step": 804 |
| }, |
| { |
| "epoch": 1.7148936170212767, |
| "grad_norm": 0.482421875, |
| "learning_rate": 3.2381893540025676e-05, |
| "loss": 0.9033365845680237, |
| "step": 806 |
| }, |
| { |
| "epoch": 1.7191489361702128, |
| "grad_norm": 1.0234375, |
| "learning_rate": 3.227060216808902e-05, |
| "loss": 1.0640370845794678, |
| "step": 808 |
| }, |
| { |
| "epoch": 1.7234042553191489, |
| "grad_norm": 1.2734375, |
| "learning_rate": 3.2159392913799974e-05, |
| "loss": 0.8058955669403076, |
| "step": 810 |
| }, |
| { |
| "epoch": 1.7276595744680852, |
| "grad_norm": 0.46875, |
| "learning_rate": 3.2048268225876204e-05, |
| "loss": 0.9892784357070923, |
| "step": 812 |
| }, |
| { |
| "epoch": 1.7319148936170212, |
| "grad_norm": 0.63671875, |
| "learning_rate": 3.1937230551173295e-05, |
| "loss": 0.7961447834968567, |
| "step": 814 |
| }, |
| { |
| "epoch": 1.7361702127659573, |
| "grad_norm": 0.318359375, |
| "learning_rate": 3.182628233463091e-05, |
| "loss": 0.9170913100242615, |
| "step": 816 |
| }, |
| { |
| "epoch": 1.7404255319148936, |
| "grad_norm": 0.419921875, |
| "learning_rate": 3.1715426019218936e-05, |
| "loss": 0.9619283080101013, |
| "step": 818 |
| }, |
| { |
| "epoch": 1.7446808510638299, |
| "grad_norm": 0.390625, |
| "learning_rate": 3.1604664045883653e-05, |
| "loss": 0.9417982697486877, |
| "step": 820 |
| }, |
| { |
| "epoch": 1.748936170212766, |
| "grad_norm": 0.453125, |
| "learning_rate": 3.1493998853494055e-05, |
| "loss": 0.8605011105537415, |
| "step": 822 |
| }, |
| { |
| "epoch": 1.753191489361702, |
| "grad_norm": 0.41015625, |
| "learning_rate": 3.1383432878788086e-05, |
| "loss": 0.9855290651321411, |
| "step": 824 |
| }, |
| { |
| "epoch": 1.7574468085106383, |
| "grad_norm": 0.482421875, |
| "learning_rate": 3.127296855631906e-05, |
| "loss": 1.0079026222229004, |
| "step": 826 |
| }, |
| { |
| "epoch": 1.7617021276595746, |
| "grad_norm": 0.49609375, |
| "learning_rate": 3.116260831840196e-05, |
| "loss": 0.9938555955886841, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.7659574468085106, |
| "grad_norm": 3.65625, |
| "learning_rate": 3.105235459505996e-05, |
| "loss": 0.930496871471405, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.7702127659574467, |
| "grad_norm": 0.3203125, |
| "learning_rate": 3.0942209813970894e-05, |
| "loss": 0.9760335683822632, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.774468085106383, |
| "grad_norm": 0.431640625, |
| "learning_rate": 3.0832176400413745e-05, |
| "loss": 0.9947340488433838, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.7787234042553193, |
| "grad_norm": 0.84765625, |
| "learning_rate": 3.072225677721537e-05, |
| "loss": 0.994687020778656, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.7829787234042553, |
| "grad_norm": 0.33984375, |
| "learning_rate": 3.0612453364697025e-05, |
| "loss": 0.6977970004081726, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.7872340425531914, |
| "grad_norm": 0.94140625, |
| "learning_rate": 3.050276858062112e-05, |
| "loss": 0.9086024761199951, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.7914893617021277, |
| "grad_norm": 0.390625, |
| "learning_rate": 3.039320484013799e-05, |
| "loss": 0.7375553250312805, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.795744680851064, |
| "grad_norm": 1.234375, |
| "learning_rate": 3.028376455573274e-05, |
| "loss": 0.9830509424209595, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.8, |
| "grad_norm": 0.365234375, |
| "learning_rate": 3.0174450137172063e-05, |
| "loss": 0.8254852890968323, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.804255319148936, |
| "grad_norm": 0.55859375, |
| "learning_rate": 3.0065263991451205e-05, |
| "loss": 0.865581750869751, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.8085106382978724, |
| "grad_norm": 0.56640625, |
| "learning_rate": 2.9956208522740998e-05, |
| "loss": 0.9538697600364685, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.8127659574468085, |
| "grad_norm": 0.73828125, |
| "learning_rate": 2.9847286132334884e-05, |
| "loss": 0.9136937856674194, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.8170212765957445, |
| "grad_norm": 0.59765625, |
| "learning_rate": 2.9738499218596033e-05, |
| "loss": 0.9238496422767639, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.8212765957446808, |
| "grad_norm": 0.7890625, |
| "learning_rate": 2.962985017690459e-05, |
| "loss": 0.870966374874115, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.825531914893617, |
| "grad_norm": 0.328125, |
| "learning_rate": 2.9521341399604866e-05, |
| "loss": 0.9596646428108215, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.8297872340425532, |
| "grad_norm": 0.80859375, |
| "learning_rate": 2.9412975275952698e-05, |
| "loss": 0.8268457651138306, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.8340425531914892, |
| "grad_norm": 0.58984375, |
| "learning_rate": 2.9304754192062825e-05, |
| "loss": 0.9566388130187988, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.8382978723404255, |
| "grad_norm": 0.3515625, |
| "learning_rate": 2.919668053085637e-05, |
| "loss": 0.9026366472244263, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.8425531914893618, |
| "grad_norm": 1.5859375, |
| "learning_rate": 2.908875667200835e-05, |
| "loss": 1.1470530033111572, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.8468085106382979, |
| "grad_norm": 0.53515625, |
| "learning_rate": 2.898098499189525e-05, |
| "loss": 0.9321303367614746, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.851063829787234, |
| "grad_norm": 0.462890625, |
| "learning_rate": 2.887336786354277e-05, |
| "loss": 0.8843462467193604, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.8553191489361702, |
| "grad_norm": 0.412109375, |
| "learning_rate": 2.8765907656573538e-05, |
| "loss": 0.9543187022209167, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.8595744680851065, |
| "grad_norm": 0.40234375, |
| "learning_rate": 2.8658606737154885e-05, |
| "loss": 0.9508803486824036, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.8638297872340426, |
| "grad_norm": 0.953125, |
| "learning_rate": 2.8551467467946817e-05, |
| "loss": 0.7834142446517944, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.8680851063829786, |
| "grad_norm": 0.5703125, |
| "learning_rate": 2.844449220804997e-05, |
| "loss": 0.9817957282066345, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.872340425531915, |
| "grad_norm": 0.5859375, |
| "learning_rate": 2.8337683312953634e-05, |
| "loss": 0.9959681630134583, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.8765957446808512, |
| "grad_norm": 0.40234375, |
| "learning_rate": 2.823104313448392e-05, |
| "loss": 0.8245176672935486, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.8808510638297873, |
| "grad_norm": 0.3203125, |
| "learning_rate": 2.8124574020751983e-05, |
| "loss": 0.9558523297309875, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.8851063829787233, |
| "grad_norm": 0.37890625, |
| "learning_rate": 2.8018278316102283e-05, |
| "loss": 0.8767422437667847, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.8893617021276596, |
| "grad_norm": 0.40234375, |
| "learning_rate": 2.7912158361060976e-05, |
| "loss": 0.966520369052887, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.8936170212765957, |
| "grad_norm": 0.455078125, |
| "learning_rate": 2.7806216492284407e-05, |
| "loss": 0.6318687200546265, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.8978723404255318, |
| "grad_norm": 0.78125, |
| "learning_rate": 2.7700455042507637e-05, |
| "loss": 0.9377596378326416, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.902127659574468, |
| "grad_norm": 0.60546875, |
| "learning_rate": 2.759487634049306e-05, |
| "loss": 1.0316444635391235, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.9063829787234043, |
| "grad_norm": 0.455078125, |
| "learning_rate": 2.7489482710979147e-05, |
| "loss": 0.7911099195480347, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.9106382978723404, |
| "grad_norm": 2.09375, |
| "learning_rate": 2.7384276474629283e-05, |
| "loss": 0.8916456699371338, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.9148936170212765, |
| "grad_norm": 0.53125, |
| "learning_rate": 2.7279259947980615e-05, |
| "loss": 1.0238900184631348, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.9191489361702128, |
| "grad_norm": 0.373046875, |
| "learning_rate": 2.717443544339307e-05, |
| "loss": 0.9721688628196716, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.923404255319149, |
| "grad_norm": 0.423828125, |
| "learning_rate": 2.7069805268998467e-05, |
| "loss": 0.8731375336647034, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.9276595744680851, |
| "grad_norm": 0.482421875, |
| "learning_rate": 2.6965371728649632e-05, |
| "loss": 0.9867290258407593, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.9319148936170212, |
| "grad_norm": 0.484375, |
| "learning_rate": 2.686113712186971e-05, |
| "loss": 0.8383113741874695, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.9361702127659575, |
| "grad_norm": 0.79296875, |
| "learning_rate": 2.6757103743801555e-05, |
| "loss": 0.9159626960754395, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.9404255319148938, |
| "grad_norm": 1.4765625, |
| "learning_rate": 2.665327388515714e-05, |
| "loss": 0.9763681292533875, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.9446808510638298, |
| "grad_norm": 1.0546875, |
| "learning_rate": 2.6549649832167138e-05, |
| "loss": 1.0418541431427002, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.9489361702127659, |
| "grad_norm": 0.73828125, |
| "learning_rate": 2.644623386653059e-05, |
| "loss": 1.002961277961731, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.9531914893617022, |
| "grad_norm": 0.5078125, |
| "learning_rate": 2.6343028265364677e-05, |
| "loss": 0.8668839335441589, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.9574468085106385, |
| "grad_norm": 0.470703125, |
| "learning_rate": 2.6240035301154533e-05, |
| "loss": 1.0359348058700562, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.9617021276595743, |
| "grad_norm": 0.44140625, |
| "learning_rate": 2.6137257241703254e-05, |
| "loss": 0.9270017147064209, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.9659574468085106, |
| "grad_norm": 0.29296875, |
| "learning_rate": 2.6034696350081965e-05, |
| "loss": 0.9452486634254456, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.9702127659574469, |
| "grad_norm": 0.53125, |
| "learning_rate": 2.5932354884579916e-05, |
| "loss": 0.9649438261985779, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.974468085106383, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.5830235098654857e-05, |
| "loss": 0.7984356880187988, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.978723404255319, |
| "grad_norm": 0.380859375, |
| "learning_rate": 2.5728339240883376e-05, |
| "loss": 1.008255958557129, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.9829787234042553, |
| "grad_norm": 0.359375, |
| "learning_rate": 2.5626669554911353e-05, |
| "loss": 1.021424651145935, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.9872340425531916, |
| "grad_norm": 0.53125, |
| "learning_rate": 2.55252282794046e-05, |
| "loss": 0.8926823735237122, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.9914893617021276, |
| "grad_norm": 0.4140625, |
| "learning_rate": 2.5424017647999574e-05, |
| "loss": 0.7046434283256531, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.9957446808510637, |
| "grad_norm": 0.5078125, |
| "learning_rate": 2.532303988925417e-05, |
| "loss": 1.097957730293274, |
| "step": 938 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.90234375, |
| "learning_rate": 2.5222297226598633e-05, |
| "loss": 0.8573867082595825, |
| "step": 940 |
| }, |
| { |
| "epoch": 2.0042553191489363, |
| "grad_norm": 0.455078125, |
| "learning_rate": 2.512179187828667e-05, |
| "loss": 0.6483351588249207, |
| "step": 942 |
| }, |
| { |
| "epoch": 2.008510638297872, |
| "grad_norm": 0.380859375, |
| "learning_rate": 2.5021526057346518e-05, |
| "loss": 0.6100251078605652, |
| "step": 944 |
| }, |
| { |
| "epoch": 2.0127659574468084, |
| "grad_norm": 0.365234375, |
| "learning_rate": 2.4921501971532267e-05, |
| "loss": 0.6785602569580078, |
| "step": 946 |
| }, |
| { |
| "epoch": 2.0170212765957447, |
| "grad_norm": 0.515625, |
| "learning_rate": 2.4821721823275266e-05, |
| "loss": 0.6127380728721619, |
| "step": 948 |
| }, |
| { |
| "epoch": 2.021276595744681, |
| "grad_norm": 2.140625, |
| "learning_rate": 2.472218780963559e-05, |
| "loss": 0.845111072063446, |
| "step": 950 |
| }, |
| { |
| "epoch": 2.025531914893617, |
| "grad_norm": 0.375, |
| "learning_rate": 2.462290212225365e-05, |
| "loss": 0.6631003022193909, |
| "step": 952 |
| }, |
| { |
| "epoch": 2.029787234042553, |
| "grad_norm": 0.435546875, |
| "learning_rate": 2.452386694730197e-05, |
| "loss": 0.5201372504234314, |
| "step": 954 |
| }, |
| { |
| "epoch": 2.0340425531914894, |
| "grad_norm": 0.48046875, |
| "learning_rate": 2.4425084465437053e-05, |
| "loss": 0.7710279226303101, |
| "step": 956 |
| }, |
| { |
| "epoch": 2.0382978723404257, |
| "grad_norm": 0.64453125, |
| "learning_rate": 2.4326556851751346e-05, |
| "loss": 0.6157738566398621, |
| "step": 958 |
| }, |
| { |
| "epoch": 2.0425531914893615, |
| "grad_norm": 0.49609375, |
| "learning_rate": 2.422828627572534e-05, |
| "loss": 0.7093011736869812, |
| "step": 960 |
| }, |
| { |
| "epoch": 2.046808510638298, |
| "grad_norm": 0.79296875, |
| "learning_rate": 2.4130274901179803e-05, |
| "loss": 0.6860568523406982, |
| "step": 962 |
| }, |
| { |
| "epoch": 2.051063829787234, |
| "grad_norm": 1.359375, |
| "learning_rate": 2.4032524886228184e-05, |
| "loss": 0.719150960445404, |
| "step": 964 |
| }, |
| { |
| "epoch": 2.0553191489361704, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.3935038383229e-05, |
| "loss": 0.6379442811012268, |
| "step": 966 |
| }, |
| { |
| "epoch": 2.0595744680851062, |
| "grad_norm": 0.369140625, |
| "learning_rate": 2.3837817538738525e-05, |
| "loss": 0.7576701641082764, |
| "step": 968 |
| }, |
| { |
| "epoch": 2.0638297872340425, |
| "grad_norm": 0.3671875, |
| "learning_rate": 2.374086449346352e-05, |
| "loss": 0.4662551283836365, |
| "step": 970 |
| }, |
| { |
| "epoch": 2.068085106382979, |
| "grad_norm": 0.57421875, |
| "learning_rate": 2.3644181382214013e-05, |
| "loss": 0.5384807586669922, |
| "step": 972 |
| }, |
| { |
| "epoch": 2.072340425531915, |
| "grad_norm": 0.515625, |
| "learning_rate": 2.3547770333856386e-05, |
| "loss": 0.6011054515838623, |
| "step": 974 |
| }, |
| { |
| "epoch": 2.076595744680851, |
| "grad_norm": 0.83203125, |
| "learning_rate": 2.345163347126647e-05, |
| "loss": 0.5131646990776062, |
| "step": 976 |
| }, |
| { |
| "epoch": 2.0808510638297872, |
| "grad_norm": 0.52734375, |
| "learning_rate": 2.3355772911282796e-05, |
| "loss": 0.8288999199867249, |
| "step": 978 |
| }, |
| { |
| "epoch": 2.0851063829787235, |
| "grad_norm": 0.2080078125, |
| "learning_rate": 2.3260190764659976e-05, |
| "loss": 0.47640979290008545, |
| "step": 980 |
| }, |
| { |
| "epoch": 2.0893617021276594, |
| "grad_norm": 0.55859375, |
| "learning_rate": 2.316488913602222e-05, |
| "loss": 0.5666584372520447, |
| "step": 982 |
| }, |
| { |
| "epoch": 2.0936170212765957, |
| "grad_norm": 1.0390625, |
| "learning_rate": 2.3069870123817056e-05, |
| "loss": 0.5758649110794067, |
| "step": 984 |
| }, |
| { |
| "epoch": 2.097872340425532, |
| "grad_norm": 0.314453125, |
| "learning_rate": 2.2975135820269026e-05, |
| "loss": 0.5656971335411072, |
| "step": 986 |
| }, |
| { |
| "epoch": 2.1021276595744682, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.2880688311333702e-05, |
| "loss": 0.3263399302959442, |
| "step": 988 |
| }, |
| { |
| "epoch": 2.106382978723404, |
| "grad_norm": 0.85546875, |
| "learning_rate": 2.278652967665173e-05, |
| "loss": 0.690028965473175, |
| "step": 990 |
| }, |
| { |
| "epoch": 2.1106382978723404, |
| "grad_norm": 0.7890625, |
| "learning_rate": 2.2692661989502995e-05, |
| "loss": 0.6520633101463318, |
| "step": 992 |
| }, |
| { |
| "epoch": 2.1148936170212767, |
| "grad_norm": 0.36328125, |
| "learning_rate": 2.2599087316761018e-05, |
| "loss": 0.7235679626464844, |
| "step": 994 |
| }, |
| { |
| "epoch": 2.119148936170213, |
| "grad_norm": 0.84375, |
| "learning_rate": 2.2505807718847456e-05, |
| "loss": 0.6303759217262268, |
| "step": 996 |
| }, |
| { |
| "epoch": 2.123404255319149, |
| "grad_norm": 0.65625, |
| "learning_rate": 2.24128252496867e-05, |
| "loss": 0.6719092130661011, |
| "step": 998 |
| }, |
| { |
| "epoch": 2.127659574468085, |
| "grad_norm": 0.5, |
| "learning_rate": 2.2320141956660646e-05, |
| "loss": 0.28417664766311646, |
| "step": 1000 |
| }, |
| { |
| "epoch": 2.1319148936170214, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.2227759880563626e-05, |
| "loss": 0.7599141597747803, |
| "step": 1002 |
| }, |
| { |
| "epoch": 2.1361702127659576, |
| "grad_norm": 0.58984375, |
| "learning_rate": 2.2135681055557504e-05, |
| "loss": 0.6200002431869507, |
| "step": 1004 |
| }, |
| { |
| "epoch": 2.1404255319148935, |
| "grad_norm": 0.7578125, |
| "learning_rate": 2.2043907509126812e-05, |
| "loss": 0.7208251953125, |
| "step": 1006 |
| }, |
| { |
| "epoch": 2.1446808510638298, |
| "grad_norm": 0.90234375, |
| "learning_rate": 2.1952441262034188e-05, |
| "loss": 0.5411021709442139, |
| "step": 1008 |
| }, |
| { |
| "epoch": 2.148936170212766, |
| "grad_norm": 0.41796875, |
| "learning_rate": 2.1861284328275845e-05, |
| "loss": 0.5026164650917053, |
| "step": 1010 |
| }, |
| { |
| "epoch": 2.153191489361702, |
| "grad_norm": 0.44140625, |
| "learning_rate": 2.1770438715037165e-05, |
| "loss": 0.6443688273429871, |
| "step": 1012 |
| }, |
| { |
| "epoch": 2.157446808510638, |
| "grad_norm": 0.578125, |
| "learning_rate": 2.1679906422648626e-05, |
| "loss": 0.6200368404388428, |
| "step": 1014 |
| }, |
| { |
| "epoch": 2.1617021276595745, |
| "grad_norm": 0.75390625, |
| "learning_rate": 2.1589689444541662e-05, |
| "loss": 0.6573978066444397, |
| "step": 1016 |
| }, |
| { |
| "epoch": 2.1659574468085108, |
| "grad_norm": 1.390625, |
| "learning_rate": 2.1499789767204812e-05, |
| "loss": 0.6872032880783081, |
| "step": 1018 |
| }, |
| { |
| "epoch": 2.1702127659574466, |
| "grad_norm": 2.890625, |
| "learning_rate": 2.1410209370139945e-05, |
| "loss": 0.5005927085876465, |
| "step": 1020 |
| }, |
| { |
| "epoch": 2.174468085106383, |
| "grad_norm": 0.58203125, |
| "learning_rate": 2.1320950225818707e-05, |
| "loss": 0.7184480428695679, |
| "step": 1022 |
| }, |
| { |
| "epoch": 2.178723404255319, |
| "grad_norm": 0.54296875, |
| "learning_rate": 2.1232014299639085e-05, |
| "loss": 0.6513587832450867, |
| "step": 1024 |
| }, |
| { |
| "epoch": 2.1829787234042555, |
| "grad_norm": 0.76171875, |
| "learning_rate": 2.11434035498821e-05, |
| "loss": 0.5126093626022339, |
| "step": 1026 |
| }, |
| { |
| "epoch": 2.1872340425531913, |
| "grad_norm": 0.447265625, |
| "learning_rate": 2.105511992766874e-05, |
| "loss": 0.6408154964447021, |
| "step": 1028 |
| }, |
| { |
| "epoch": 2.1914893617021276, |
| "grad_norm": 0.59765625, |
| "learning_rate": 2.0967165376916945e-05, |
| "loss": 0.7048395872116089, |
| "step": 1030 |
| }, |
| { |
| "epoch": 2.195744680851064, |
| "grad_norm": 0.84375, |
| "learning_rate": 2.0879541834298828e-05, |
| "loss": 0.662340521812439, |
| "step": 1032 |
| }, |
| { |
| "epoch": 2.2, |
| "grad_norm": 0.70703125, |
| "learning_rate": 2.0792251229198035e-05, |
| "loss": 0.7315186858177185, |
| "step": 1034 |
| }, |
| { |
| "epoch": 2.204255319148936, |
| "grad_norm": 0.435546875, |
| "learning_rate": 2.0705295483667274e-05, |
| "loss": 0.47347530722618103, |
| "step": 1036 |
| }, |
| { |
| "epoch": 2.2085106382978723, |
| "grad_norm": 0.79296875, |
| "learning_rate": 2.0618676512385965e-05, |
| "loss": 0.5597242712974548, |
| "step": 1038 |
| }, |
| { |
| "epoch": 2.2127659574468086, |
| "grad_norm": 0.8515625, |
| "learning_rate": 2.0532396222618083e-05, |
| "loss": 0.6009190082550049, |
| "step": 1040 |
| }, |
| { |
| "epoch": 2.217021276595745, |
| "grad_norm": 0.4140625, |
| "learning_rate": 2.0446456514170166e-05, |
| "loss": 0.7078320384025574, |
| "step": 1042 |
| }, |
| { |
| "epoch": 2.2212765957446807, |
| "grad_norm": 0.6171875, |
| "learning_rate": 2.0360859279349523e-05, |
| "loss": 0.6109620928764343, |
| "step": 1044 |
| }, |
| { |
| "epoch": 2.225531914893617, |
| "grad_norm": 1.8984375, |
| "learning_rate": 2.0275606402922496e-05, |
| "loss": 0.6881995797157288, |
| "step": 1046 |
| }, |
| { |
| "epoch": 2.2297872340425533, |
| "grad_norm": 0.65625, |
| "learning_rate": 2.0190699762073015e-05, |
| "loss": 0.8076979517936707, |
| "step": 1048 |
| }, |
| { |
| "epoch": 2.2340425531914896, |
| "grad_norm": 0.70703125, |
| "learning_rate": 2.010614122636125e-05, |
| "loss": 0.5962254405021667, |
| "step": 1050 |
| }, |
| { |
| "epoch": 2.2382978723404254, |
| "grad_norm": 0.494140625, |
| "learning_rate": 2.002193265768241e-05, |
| "loss": 0.5154112577438354, |
| "step": 1052 |
| }, |
| { |
| "epoch": 2.2425531914893617, |
| "grad_norm": 0.478515625, |
| "learning_rate": 1.9938075910225816e-05, |
| "loss": 0.5857576131820679, |
| "step": 1054 |
| }, |
| { |
| "epoch": 2.246808510638298, |
| "grad_norm": 1.671875, |
| "learning_rate": 1.985457283043401e-05, |
| "loss": 0.7445710301399231, |
| "step": 1056 |
| }, |
| { |
| "epoch": 2.251063829787234, |
| "grad_norm": 0.5234375, |
| "learning_rate": 1.977142525696214e-05, |
| "loss": 0.4012032449245453, |
| "step": 1058 |
| }, |
| { |
| "epoch": 2.25531914893617, |
| "grad_norm": 0.419921875, |
| "learning_rate": 1.9688635020637438e-05, |
| "loss": 0.7297635078430176, |
| "step": 1060 |
| }, |
| { |
| "epoch": 2.2595744680851064, |
| "grad_norm": 0.458984375, |
| "learning_rate": 1.9606203944418924e-05, |
| "loss": 0.5968733429908752, |
| "step": 1062 |
| }, |
| { |
| "epoch": 2.2638297872340427, |
| "grad_norm": 0.498046875, |
| "learning_rate": 1.9524133843357294e-05, |
| "loss": 0.7591136693954468, |
| "step": 1064 |
| }, |
| { |
| "epoch": 2.2680851063829786, |
| "grad_norm": 0.421875, |
| "learning_rate": 1.9442426524554893e-05, |
| "loss": 0.6319488286972046, |
| "step": 1066 |
| }, |
| { |
| "epoch": 2.272340425531915, |
| "grad_norm": 0.58984375, |
| "learning_rate": 1.9361083787126e-05, |
| "loss": 0.6187411546707153, |
| "step": 1068 |
| }, |
| { |
| "epoch": 2.276595744680851, |
| "grad_norm": 0.49609375, |
| "learning_rate": 1.9280107422157143e-05, |
| "loss": 0.6891050934791565, |
| "step": 1070 |
| }, |
| { |
| "epoch": 2.2808510638297874, |
| "grad_norm": 0.3984375, |
| "learning_rate": 1.9199499212667688e-05, |
| "loss": 0.507449209690094, |
| "step": 1072 |
| }, |
| { |
| "epoch": 2.2851063829787233, |
| "grad_norm": 0.515625, |
| "learning_rate": 1.9119260933570603e-05, |
| "loss": 0.563790500164032, |
| "step": 1074 |
| }, |
| { |
| "epoch": 2.2893617021276595, |
| "grad_norm": 0.48828125, |
| "learning_rate": 1.903939435163335e-05, |
| "loss": 0.5918843150138855, |
| "step": 1076 |
| }, |
| { |
| "epoch": 2.293617021276596, |
| "grad_norm": 0.6640625, |
| "learning_rate": 1.8959901225439e-05, |
| "loss": 0.6510270833969116, |
| "step": 1078 |
| }, |
| { |
| "epoch": 2.297872340425532, |
| "grad_norm": 0.6796875, |
| "learning_rate": 1.888078330534744e-05, |
| "loss": 0.4389875531196594, |
| "step": 1080 |
| }, |
| { |
| "epoch": 2.302127659574468, |
| "grad_norm": 1.78125, |
| "learning_rate": 1.880204233345696e-05, |
| "loss": 0.3430854082107544, |
| "step": 1082 |
| }, |
| { |
| "epoch": 2.3063829787234043, |
| "grad_norm": 0.51953125, |
| "learning_rate": 1.8723680043565798e-05, |
| "loss": 0.7696226835250854, |
| "step": 1084 |
| }, |
| { |
| "epoch": 2.3106382978723405, |
| "grad_norm": 0.453125, |
| "learning_rate": 1.8645698161133972e-05, |
| "loss": 0.5286341905593872, |
| "step": 1086 |
| }, |
| { |
| "epoch": 2.3148936170212764, |
| "grad_norm": 0.76953125, |
| "learning_rate": 1.8568098403245336e-05, |
| "loss": 0.567283570766449, |
| "step": 1088 |
| }, |
| { |
| "epoch": 2.3191489361702127, |
| "grad_norm": 0.439453125, |
| "learning_rate": 1.8490882478569716e-05, |
| "loss": 0.5737777352333069, |
| "step": 1090 |
| }, |
| { |
| "epoch": 2.323404255319149, |
| "grad_norm": 0.54296875, |
| "learning_rate": 1.8414052087325308e-05, |
| "loss": 0.728583037853241, |
| "step": 1092 |
| }, |
| { |
| "epoch": 2.3276595744680852, |
| "grad_norm": 0.408203125, |
| "learning_rate": 1.8337608921241267e-05, |
| "loss": 0.600398600101471, |
| "step": 1094 |
| }, |
| { |
| "epoch": 2.331914893617021, |
| "grad_norm": 0.609375, |
| "learning_rate": 1.8261554663520416e-05, |
| "loss": 0.6284059286117554, |
| "step": 1096 |
| }, |
| { |
| "epoch": 2.3361702127659574, |
| "grad_norm": 0.71875, |
| "learning_rate": 1.8185890988802214e-05, |
| "loss": 0.6262116432189941, |
| "step": 1098 |
| }, |
| { |
| "epoch": 2.3404255319148937, |
| "grad_norm": 1.0703125, |
| "learning_rate": 1.8110619563125844e-05, |
| "loss": 0.5292909741401672, |
| "step": 1100 |
| }, |
| { |
| "epoch": 2.34468085106383, |
| "grad_norm": 0.44140625, |
| "learning_rate": 1.8035742043893575e-05, |
| "loss": 0.625741720199585, |
| "step": 1102 |
| }, |
| { |
| "epoch": 2.348936170212766, |
| "grad_norm": 0.416015625, |
| "learning_rate": 1.796126007983425e-05, |
| "loss": 0.7988000512123108, |
| "step": 1104 |
| }, |
| { |
| "epoch": 2.353191489361702, |
| "grad_norm": 0.6640625, |
| "learning_rate": 1.7887175310966956e-05, |
| "loss": 0.6517726182937622, |
| "step": 1106 |
| }, |
| { |
| "epoch": 2.3574468085106384, |
| "grad_norm": 0.96875, |
| "learning_rate": 1.7813489368564965e-05, |
| "loss": 0.6426142454147339, |
| "step": 1108 |
| }, |
| { |
| "epoch": 2.3617021276595747, |
| "grad_norm": 0.474609375, |
| "learning_rate": 1.7740203875119755e-05, |
| "loss": 0.5290454626083374, |
| "step": 1110 |
| }, |
| { |
| "epoch": 2.3659574468085105, |
| "grad_norm": 0.578125, |
| "learning_rate": 1.7667320444305326e-05, |
| "loss": 0.5435438752174377, |
| "step": 1112 |
| }, |
| { |
| "epoch": 2.370212765957447, |
| "grad_norm": 0.59375, |
| "learning_rate": 1.7594840680942667e-05, |
| "loss": 0.4323336184024811, |
| "step": 1114 |
| }, |
| { |
| "epoch": 2.374468085106383, |
| "grad_norm": 0.431640625, |
| "learning_rate": 1.752276618096441e-05, |
| "loss": 0.5053521394729614, |
| "step": 1116 |
| }, |
| { |
| "epoch": 2.378723404255319, |
| "grad_norm": 1.1640625, |
| "learning_rate": 1.7451098531379666e-05, |
| "loss": 0.63356614112854, |
| "step": 1118 |
| }, |
| { |
| "epoch": 2.382978723404255, |
| "grad_norm": 0.58984375, |
| "learning_rate": 1.7379839310239118e-05, |
| "loss": 0.7377380728721619, |
| "step": 1120 |
| }, |
| { |
| "epoch": 2.3872340425531915, |
| "grad_norm": 0.9296875, |
| "learning_rate": 1.7308990086600258e-05, |
| "loss": 0.7407448887825012, |
| "step": 1122 |
| }, |
| { |
| "epoch": 2.391489361702128, |
| "grad_norm": 0.396484375, |
| "learning_rate": 1.7238552420492854e-05, |
| "loss": 0.5100683569908142, |
| "step": 1124 |
| }, |
| { |
| "epoch": 2.395744680851064, |
| "grad_norm": 0.52734375, |
| "learning_rate": 1.716852786288455e-05, |
| "loss": 0.6132201552391052, |
| "step": 1126 |
| }, |
| { |
| "epoch": 2.4, |
| "grad_norm": 0.47265625, |
| "learning_rate": 1.709891795564679e-05, |
| "loss": 0.5412701368331909, |
| "step": 1128 |
| }, |
| { |
| "epoch": 2.404255319148936, |
| "grad_norm": 0.453125, |
| "learning_rate": 1.7029724231520792e-05, |
| "loss": 0.8436723351478577, |
| "step": 1130 |
| }, |
| { |
| "epoch": 2.4085106382978725, |
| "grad_norm": 0.4375, |
| "learning_rate": 1.696094821408385e-05, |
| "loss": 0.6744006872177124, |
| "step": 1132 |
| }, |
| { |
| "epoch": 2.4127659574468083, |
| "grad_norm": 1.1171875, |
| "learning_rate": 1.6892591417715775e-05, |
| "loss": 0.7410330176353455, |
| "step": 1134 |
| }, |
| { |
| "epoch": 2.4170212765957446, |
| "grad_norm": 1.3984375, |
| "learning_rate": 1.682465534756555e-05, |
| "loss": 0.6016606092453003, |
| "step": 1136 |
| }, |
| { |
| "epoch": 2.421276595744681, |
| "grad_norm": 0.4921875, |
| "learning_rate": 1.6757141499518153e-05, |
| "loss": 0.4425470530986786, |
| "step": 1138 |
| }, |
| { |
| "epoch": 2.425531914893617, |
| "grad_norm": 0.546875, |
| "learning_rate": 1.6690051360161673e-05, |
| "loss": 0.7017032504081726, |
| "step": 1140 |
| }, |
| { |
| "epoch": 2.429787234042553, |
| "grad_norm": 0.7265625, |
| "learning_rate": 1.6623386406754555e-05, |
| "loss": 0.5560771822929382, |
| "step": 1142 |
| }, |
| { |
| "epoch": 2.4340425531914893, |
| "grad_norm": 1.0390625, |
| "learning_rate": 1.655714810719307e-05, |
| "loss": 0.6498077511787415, |
| "step": 1144 |
| }, |
| { |
| "epoch": 2.4382978723404256, |
| "grad_norm": 0.466796875, |
| "learning_rate": 1.6491337919978978e-05, |
| "loss": 0.6854323148727417, |
| "step": 1146 |
| }, |
| { |
| "epoch": 2.4425531914893615, |
| "grad_norm": 0.86328125, |
| "learning_rate": 1.642595729418745e-05, |
| "loss": 0.4551085829734802, |
| "step": 1148 |
| }, |
| { |
| "epoch": 2.4468085106382977, |
| "grad_norm": 0.76171875, |
| "learning_rate": 1.6361007669435126e-05, |
| "loss": 0.5184612274169922, |
| "step": 1150 |
| }, |
| { |
| "epoch": 2.451063829787234, |
| "grad_norm": 0.5234375, |
| "learning_rate": 1.6296490475848424e-05, |
| "loss": 0.5938658118247986, |
| "step": 1152 |
| }, |
| { |
| "epoch": 2.4553191489361703, |
| "grad_norm": 0.69921875, |
| "learning_rate": 1.623240713403207e-05, |
| "loss": 0.5654492974281311, |
| "step": 1154 |
| }, |
| { |
| "epoch": 2.4595744680851066, |
| "grad_norm": 0.47265625, |
| "learning_rate": 1.6168759055037817e-05, |
| "loss": 0.6496747732162476, |
| "step": 1156 |
| }, |
| { |
| "epoch": 2.4638297872340424, |
| "grad_norm": 1.921875, |
| "learning_rate": 1.610554764033332e-05, |
| "loss": 0.7038140296936035, |
| "step": 1158 |
| }, |
| { |
| "epoch": 2.4680851063829787, |
| "grad_norm": 0.58203125, |
| "learning_rate": 1.6042774281771345e-05, |
| "loss": 0.641715407371521, |
| "step": 1160 |
| }, |
| { |
| "epoch": 2.472340425531915, |
| "grad_norm": 0.80859375, |
| "learning_rate": 1.59804403615591e-05, |
| "loss": 0.5276774168014526, |
| "step": 1162 |
| }, |
| { |
| "epoch": 2.476595744680851, |
| "grad_norm": 0.45703125, |
| "learning_rate": 1.5918547252227793e-05, |
| "loss": 0.6699836850166321, |
| "step": 1164 |
| }, |
| { |
| "epoch": 2.480851063829787, |
| "grad_norm": 0.69140625, |
| "learning_rate": 1.58570963166024e-05, |
| "loss": 0.6293801069259644, |
| "step": 1166 |
| }, |
| { |
| "epoch": 2.4851063829787234, |
| "grad_norm": 0.435546875, |
| "learning_rate": 1.5796088907771674e-05, |
| "loss": 0.6323214769363403, |
| "step": 1168 |
| }, |
| { |
| "epoch": 2.4893617021276597, |
| "grad_norm": 0.55859375, |
| "learning_rate": 1.5735526369058364e-05, |
| "loss": 0.6430104970932007, |
| "step": 1170 |
| }, |
| { |
| "epoch": 2.4936170212765956, |
| "grad_norm": 0.380859375, |
| "learning_rate": 1.5675410033989592e-05, |
| "loss": 0.5185415744781494, |
| "step": 1172 |
| }, |
| { |
| "epoch": 2.497872340425532, |
| "grad_norm": 0.78125, |
| "learning_rate": 1.561574122626754e-05, |
| "loss": 0.47881028056144714, |
| "step": 1174 |
| }, |
| { |
| "epoch": 2.502127659574468, |
| "grad_norm": 0.4921875, |
| "learning_rate": 1.555652125974028e-05, |
| "loss": 0.7108798027038574, |
| "step": 1176 |
| }, |
| { |
| "epoch": 2.506382978723404, |
| "grad_norm": 1.09375, |
| "learning_rate": 1.5497751438372827e-05, |
| "loss": 0.5829865336418152, |
| "step": 1178 |
| }, |
| { |
| "epoch": 2.5106382978723403, |
| "grad_norm": 0.486328125, |
| "learning_rate": 1.543943305621846e-05, |
| "loss": 0.5929511785507202, |
| "step": 1180 |
| }, |
| { |
| "epoch": 2.5148936170212766, |
| "grad_norm": 0.5703125, |
| "learning_rate": 1.538156739739021e-05, |
| "loss": 0.5175199508666992, |
| "step": 1182 |
| }, |
| { |
| "epoch": 2.519148936170213, |
| "grad_norm": 0.298828125, |
| "learning_rate": 1.5324155736032595e-05, |
| "loss": 0.7886143326759338, |
| "step": 1184 |
| }, |
| { |
| "epoch": 2.523404255319149, |
| "grad_norm": 0.416015625, |
| "learning_rate": 1.526719933629355e-05, |
| "loss": 0.6642839908599854, |
| "step": 1186 |
| }, |
| { |
| "epoch": 2.527659574468085, |
| "grad_norm": 0.703125, |
| "learning_rate": 1.5210699452296592e-05, |
| "loss": 0.8588666915893555, |
| "step": 1188 |
| }, |
| { |
| "epoch": 2.5319148936170213, |
| "grad_norm": 0.6171875, |
| "learning_rate": 1.5154657328113233e-05, |
| "loss": 0.7579631209373474, |
| "step": 1190 |
| }, |
| { |
| "epoch": 2.5361702127659576, |
| "grad_norm": 0.46484375, |
| "learning_rate": 1.5099074197735552e-05, |
| "loss": 0.6104500889778137, |
| "step": 1192 |
| }, |
| { |
| "epoch": 2.5404255319148934, |
| "grad_norm": 0.58203125, |
| "learning_rate": 1.504395128504905e-05, |
| "loss": 0.34646666049957275, |
| "step": 1194 |
| }, |
| { |
| "epoch": 2.5446808510638297, |
| "grad_norm": 0.7890625, |
| "learning_rate": 1.4989289803805685e-05, |
| "loss": 0.5463817119598389, |
| "step": 1196 |
| }, |
| { |
| "epoch": 2.548936170212766, |
| "grad_norm": 0.83984375, |
| "learning_rate": 1.4935090957597149e-05, |
| "loss": 0.61968594789505, |
| "step": 1198 |
| }, |
| { |
| "epoch": 2.5531914893617023, |
| "grad_norm": 0.61328125, |
| "learning_rate": 1.4881355939828364e-05, |
| "loss": 0.6242573857307434, |
| "step": 1200 |
| }, |
| { |
| "epoch": 2.5574468085106385, |
| "grad_norm": 0.51953125, |
| "learning_rate": 1.4828085933691223e-05, |
| "loss": 0.6220880150794983, |
| "step": 1202 |
| }, |
| { |
| "epoch": 2.5617021276595744, |
| "grad_norm": 0.58984375, |
| "learning_rate": 1.477528211213852e-05, |
| "loss": 0.6225618124008179, |
| "step": 1204 |
| }, |
| { |
| "epoch": 2.5659574468085107, |
| "grad_norm": 0.3515625, |
| "learning_rate": 1.4722945637858116e-05, |
| "loss": 0.4512316584587097, |
| "step": 1206 |
| }, |
| { |
| "epoch": 2.570212765957447, |
| "grad_norm": 0.9921875, |
| "learning_rate": 1.4671077663247351e-05, |
| "loss": 0.5912795662879944, |
| "step": 1208 |
| }, |
| { |
| "epoch": 2.574468085106383, |
| "grad_norm": 0.4921875, |
| "learning_rate": 1.4619679330387679e-05, |
| "loss": 0.6551855802536011, |
| "step": 1210 |
| }, |
| { |
| "epoch": 2.578723404255319, |
| "grad_norm": 0.62109375, |
| "learning_rate": 1.4568751771019482e-05, |
| "loss": 0.5984119772911072, |
| "step": 1212 |
| }, |
| { |
| "epoch": 2.5829787234042554, |
| "grad_norm": 0.65625, |
| "learning_rate": 1.4518296106517206e-05, |
| "loss": 0.6674041152000427, |
| "step": 1214 |
| }, |
| { |
| "epoch": 2.5872340425531917, |
| "grad_norm": 0.58203125, |
| "learning_rate": 1.4468313447864624e-05, |
| "loss": 0.48521387577056885, |
| "step": 1216 |
| }, |
| { |
| "epoch": 2.5914893617021275, |
| "grad_norm": 3.734375, |
| "learning_rate": 1.441880489563038e-05, |
| "loss": 0.6522884964942932, |
| "step": 1218 |
| }, |
| { |
| "epoch": 2.595744680851064, |
| "grad_norm": 1.109375, |
| "learning_rate": 1.4369771539943776e-05, |
| "loss": 0.7783772945404053, |
| "step": 1220 |
| }, |
| { |
| "epoch": 2.6, |
| "grad_norm": 0.6640625, |
| "learning_rate": 1.4321214460470755e-05, |
| "loss": 0.5732651948928833, |
| "step": 1222 |
| }, |
| { |
| "epoch": 2.604255319148936, |
| "grad_norm": 0.53125, |
| "learning_rate": 1.4273134726390138e-05, |
| "loss": 0.6476449966430664, |
| "step": 1224 |
| }, |
| { |
| "epoch": 2.608510638297872, |
| "grad_norm": 1.3515625, |
| "learning_rate": 1.4225533396370053e-05, |
| "loss": 0.7929537296295166, |
| "step": 1226 |
| }, |
| { |
| "epoch": 2.6127659574468085, |
| "grad_norm": 0.43359375, |
| "learning_rate": 1.4178411518544654e-05, |
| "loss": 0.3998229205608368, |
| "step": 1228 |
| }, |
| { |
| "epoch": 2.617021276595745, |
| "grad_norm": 1.6640625, |
| "learning_rate": 1.413177013049104e-05, |
| "loss": 0.4514097273349762, |
| "step": 1230 |
| }, |
| { |
| "epoch": 2.621276595744681, |
| "grad_norm": 0.9765625, |
| "learning_rate": 1.4085610259206387e-05, |
| "loss": 0.6864388585090637, |
| "step": 1232 |
| }, |
| { |
| "epoch": 2.625531914893617, |
| "grad_norm": 0.421875, |
| "learning_rate": 1.4039932921085362e-05, |
| "loss": 0.5575997233390808, |
| "step": 1234 |
| }, |
| { |
| "epoch": 2.629787234042553, |
| "grad_norm": 0.49609375, |
| "learning_rate": 1.3994739121897718e-05, |
| "loss": 0.653709888458252, |
| "step": 1236 |
| }, |
| { |
| "epoch": 2.6340425531914895, |
| "grad_norm": 0.53125, |
| "learning_rate": 1.3950029856766165e-05, |
| "loss": 0.6980820298194885, |
| "step": 1238 |
| }, |
| { |
| "epoch": 2.6382978723404253, |
| "grad_norm": 0.515625, |
| "learning_rate": 1.3905806110144452e-05, |
| "loss": 0.6501242518424988, |
| "step": 1240 |
| }, |
| { |
| "epoch": 2.6425531914893616, |
| "grad_norm": 0.46875, |
| "learning_rate": 1.3862068855795701e-05, |
| "loss": 0.7544458508491516, |
| "step": 1242 |
| }, |
| { |
| "epoch": 2.646808510638298, |
| "grad_norm": 1.0, |
| "learning_rate": 1.3818819056770932e-05, |
| "loss": 0.5266544222831726, |
| "step": 1244 |
| }, |
| { |
| "epoch": 2.651063829787234, |
| "grad_norm": 0.9140625, |
| "learning_rate": 1.3776057665387907e-05, |
| "loss": 0.6442818641662598, |
| "step": 1246 |
| }, |
| { |
| "epoch": 2.65531914893617, |
| "grad_norm": 0.375, |
| "learning_rate": 1.373378562321012e-05, |
| "loss": 0.6095808148384094, |
| "step": 1248 |
| }, |
| { |
| "epoch": 2.6595744680851063, |
| "grad_norm": 0.453125, |
| "learning_rate": 1.3692003861026083e-05, |
| "loss": 0.7218018770217896, |
| "step": 1250 |
| }, |
| { |
| "epoch": 2.6638297872340426, |
| "grad_norm": 0.52734375, |
| "learning_rate": 1.365071329882883e-05, |
| "loss": 0.6180848479270935, |
| "step": 1252 |
| }, |
| { |
| "epoch": 2.6680851063829785, |
| "grad_norm": 0.73828125, |
| "learning_rate": 1.360991484579566e-05, |
| "loss": 0.4784125089645386, |
| "step": 1254 |
| }, |
| { |
| "epoch": 2.6723404255319148, |
| "grad_norm": 0.51953125, |
| "learning_rate": 1.3569609400268112e-05, |
| "loss": 0.7386208772659302, |
| "step": 1256 |
| }, |
| { |
| "epoch": 2.676595744680851, |
| "grad_norm": 0.33984375, |
| "learning_rate": 1.3529797849732183e-05, |
| "loss": 0.5939998626708984, |
| "step": 1258 |
| }, |
| { |
| "epoch": 2.6808510638297873, |
| "grad_norm": 0.494140625, |
| "learning_rate": 1.3490481070798797e-05, |
| "loss": 0.6285054683685303, |
| "step": 1260 |
| }, |
| { |
| "epoch": 2.6851063829787236, |
| "grad_norm": 0.6171875, |
| "learning_rate": 1.34516599291845e-05, |
| "loss": 0.7830681800842285, |
| "step": 1262 |
| }, |
| { |
| "epoch": 2.6893617021276595, |
| "grad_norm": 2.609375, |
| "learning_rate": 1.3413335279692392e-05, |
| "loss": 0.7019430994987488, |
| "step": 1264 |
| }, |
| { |
| "epoch": 2.6936170212765957, |
| "grad_norm": 1.1328125, |
| "learning_rate": 1.3375507966193309e-05, |
| "loss": 0.5392411947250366, |
| "step": 1266 |
| }, |
| { |
| "epoch": 2.697872340425532, |
| "grad_norm": 0.4609375, |
| "learning_rate": 1.3338178821607234e-05, |
| "loss": 0.8191847205162048, |
| "step": 1268 |
| }, |
| { |
| "epoch": 2.702127659574468, |
| "grad_norm": 0.53515625, |
| "learning_rate": 1.3301348667884975e-05, |
| "loss": 0.30988848209381104, |
| "step": 1270 |
| }, |
| { |
| "epoch": 2.706382978723404, |
| "grad_norm": 0.474609375, |
| "learning_rate": 1.3265018315990046e-05, |
| "loss": 0.5801700353622437, |
| "step": 1272 |
| }, |
| { |
| "epoch": 2.7106382978723405, |
| "grad_norm": 0.390625, |
| "learning_rate": 1.3229188565880827e-05, |
| "loss": 0.6832275390625, |
| "step": 1274 |
| }, |
| { |
| "epoch": 2.7148936170212767, |
| "grad_norm": 0.435546875, |
| "learning_rate": 1.3193860206492936e-05, |
| "loss": 0.6654208898544312, |
| "step": 1276 |
| }, |
| { |
| "epoch": 2.719148936170213, |
| "grad_norm": 1.078125, |
| "learning_rate": 1.3159034015721865e-05, |
| "loss": 0.7127547860145569, |
| "step": 1278 |
| }, |
| { |
| "epoch": 2.723404255319149, |
| "grad_norm": 0.5859375, |
| "learning_rate": 1.3124710760405853e-05, |
| "loss": 0.537979781627655, |
| "step": 1280 |
| }, |
| { |
| "epoch": 2.727659574468085, |
| "grad_norm": 0.546875, |
| "learning_rate": 1.3090891196309e-05, |
| "loss": 0.6351765990257263, |
| "step": 1282 |
| }, |
| { |
| "epoch": 2.731914893617021, |
| "grad_norm": 0.390625, |
| "learning_rate": 1.3057576068104621e-05, |
| "loss": 0.601453959941864, |
| "step": 1284 |
| }, |
| { |
| "epoch": 2.7361702127659573, |
| "grad_norm": 0.53515625, |
| "learning_rate": 1.3024766109358845e-05, |
| "loss": 0.7181084752082825, |
| "step": 1286 |
| }, |
| { |
| "epoch": 2.7404255319148936, |
| "grad_norm": 0.59765625, |
| "learning_rate": 1.2992462042514483e-05, |
| "loss": 0.5218726396560669, |
| "step": 1288 |
| }, |
| { |
| "epoch": 2.74468085106383, |
| "grad_norm": 0.51953125, |
| "learning_rate": 1.2960664578875104e-05, |
| "loss": 0.7979943156242371, |
| "step": 1290 |
| }, |
| { |
| "epoch": 2.748936170212766, |
| "grad_norm": 0.64453125, |
| "learning_rate": 1.2929374418589363e-05, |
| "loss": 0.600561797618866, |
| "step": 1292 |
| }, |
| { |
| "epoch": 2.753191489361702, |
| "grad_norm": 0.578125, |
| "learning_rate": 1.289859225063562e-05, |
| "loss": 0.5805540084838867, |
| "step": 1294 |
| }, |
| { |
| "epoch": 2.7574468085106383, |
| "grad_norm": 0.5390625, |
| "learning_rate": 1.2868318752806726e-05, |
| "loss": 0.5937775373458862, |
| "step": 1296 |
| }, |
| { |
| "epoch": 2.7617021276595746, |
| "grad_norm": 0.6328125, |
| "learning_rate": 1.2838554591695126e-05, |
| "loss": 0.4572460353374481, |
| "step": 1298 |
| }, |
| { |
| "epoch": 2.7659574468085104, |
| "grad_norm": 0.447265625, |
| "learning_rate": 1.2809300422678187e-05, |
| "loss": 0.7637752294540405, |
| "step": 1300 |
| }, |
| { |
| "epoch": 2.7702127659574467, |
| "grad_norm": 0.53515625, |
| "learning_rate": 1.2780556889903737e-05, |
| "loss": 0.48286503553390503, |
| "step": 1302 |
| }, |
| { |
| "epoch": 2.774468085106383, |
| "grad_norm": 0.828125, |
| "learning_rate": 1.275232462627591e-05, |
| "loss": 0.5692922472953796, |
| "step": 1304 |
| }, |
| { |
| "epoch": 2.7787234042553193, |
| "grad_norm": 0.7734375, |
| "learning_rate": 1.2724604253441195e-05, |
| "loss": 0.49659186601638794, |
| "step": 1306 |
| }, |
| { |
| "epoch": 2.7829787234042556, |
| "grad_norm": 0.671875, |
| "learning_rate": 1.2697396381774753e-05, |
| "loss": 0.7237148880958557, |
| "step": 1308 |
| }, |
| { |
| "epoch": 2.7872340425531914, |
| "grad_norm": 0.44921875, |
| "learning_rate": 1.2670701610366985e-05, |
| "loss": 0.5669375658035278, |
| "step": 1310 |
| }, |
| { |
| "epoch": 2.7914893617021277, |
| "grad_norm": 0.38671875, |
| "learning_rate": 1.2644520527010319e-05, |
| "loss": 0.49942082166671753, |
| "step": 1312 |
| }, |
| { |
| "epoch": 2.795744680851064, |
| "grad_norm": 0.47265625, |
| "learning_rate": 1.2618853708186294e-05, |
| "loss": 0.5854433178901672, |
| "step": 1314 |
| }, |
| { |
| "epoch": 2.8, |
| "grad_norm": 0.890625, |
| "learning_rate": 1.2593701719052839e-05, |
| "loss": 0.4207378625869751, |
| "step": 1316 |
| }, |
| { |
| "epoch": 2.804255319148936, |
| "grad_norm": 0.65625, |
| "learning_rate": 1.2569065113431854e-05, |
| "loss": 0.6822559237480164, |
| "step": 1318 |
| }, |
| { |
| "epoch": 2.8085106382978724, |
| "grad_norm": 0.52734375, |
| "learning_rate": 1.2544944433797002e-05, |
| "loss": 0.5026736855506897, |
| "step": 1320 |
| }, |
| { |
| "epoch": 2.8127659574468087, |
| "grad_norm": 0.7109375, |
| "learning_rate": 1.252134021126177e-05, |
| "loss": 0.7221361398696899, |
| "step": 1322 |
| }, |
| { |
| "epoch": 2.8170212765957445, |
| "grad_norm": 0.6171875, |
| "learning_rate": 1.2498252965567755e-05, |
| "loss": 0.6420372724533081, |
| "step": 1324 |
| }, |
| { |
| "epoch": 2.821276595744681, |
| "grad_norm": 0.60546875, |
| "learning_rate": 1.2475683205073255e-05, |
| "loss": 0.5234490633010864, |
| "step": 1326 |
| }, |
| { |
| "epoch": 2.825531914893617, |
| "grad_norm": 0.482421875, |
| "learning_rate": 1.2453631426742047e-05, |
| "loss": 0.51298987865448, |
| "step": 1328 |
| }, |
| { |
| "epoch": 2.829787234042553, |
| "grad_norm": 0.5625, |
| "learning_rate": 1.2432098116132458e-05, |
| "loss": 0.5665377378463745, |
| "step": 1330 |
| }, |
| { |
| "epoch": 2.8340425531914892, |
| "grad_norm": 0.5703125, |
| "learning_rate": 1.2411083747386662e-05, |
| "loss": 0.7207722067832947, |
| "step": 1332 |
| }, |
| { |
| "epoch": 2.8382978723404255, |
| "grad_norm": 0.50390625, |
| "learning_rate": 1.2390588783220257e-05, |
| "loss": 0.595878005027771, |
| "step": 1334 |
| }, |
| { |
| "epoch": 2.842553191489362, |
| "grad_norm": 0.94921875, |
| "learning_rate": 1.2370613674912055e-05, |
| "loss": 0.7001873850822449, |
| "step": 1336 |
| }, |
| { |
| "epoch": 2.846808510638298, |
| "grad_norm": 0.46484375, |
| "learning_rate": 1.2351158862294165e-05, |
| "loss": 0.5404136776924133, |
| "step": 1338 |
| }, |
| { |
| "epoch": 2.851063829787234, |
| "grad_norm": 0.52734375, |
| "learning_rate": 1.2332224773742298e-05, |
| "loss": 0.5846556425094604, |
| "step": 1340 |
| }, |
| { |
| "epoch": 2.8553191489361702, |
| "grad_norm": 0.4609375, |
| "learning_rate": 1.231381182616634e-05, |
| "loss": 0.4478878080844879, |
| "step": 1342 |
| }, |
| { |
| "epoch": 2.8595744680851065, |
| "grad_norm": 0.625, |
| "learning_rate": 1.229592042500116e-05, |
| "loss": 0.49717509746551514, |
| "step": 1344 |
| }, |
| { |
| "epoch": 2.8638297872340424, |
| "grad_norm": 0.57421875, |
| "learning_rate": 1.2278550964197693e-05, |
| "loss": 0.650644063949585, |
| "step": 1346 |
| }, |
| { |
| "epoch": 2.8680851063829786, |
| "grad_norm": 0.375, |
| "learning_rate": 1.2261703826214275e-05, |
| "loss": 0.6666759252548218, |
| "step": 1348 |
| }, |
| { |
| "epoch": 2.872340425531915, |
| "grad_norm": 0.5546875, |
| "learning_rate": 1.2245379382008206e-05, |
| "loss": 0.7478127479553223, |
| "step": 1350 |
| }, |
| { |
| "epoch": 2.876595744680851, |
| "grad_norm": 0.482421875, |
| "learning_rate": 1.2229577991027574e-05, |
| "loss": 0.7170307636260986, |
| "step": 1352 |
| }, |
| { |
| "epoch": 2.8808510638297875, |
| "grad_norm": 0.67578125, |
| "learning_rate": 1.2214300001203369e-05, |
| "loss": 0.8161506056785583, |
| "step": 1354 |
| }, |
| { |
| "epoch": 2.8851063829787233, |
| "grad_norm": 0.9375, |
| "learning_rate": 1.2199545748941797e-05, |
| "loss": 0.49168941378593445, |
| "step": 1356 |
| }, |
| { |
| "epoch": 2.8893617021276596, |
| "grad_norm": 0.50390625, |
| "learning_rate": 1.2185315559116883e-05, |
| "loss": 0.46535876393318176, |
| "step": 1358 |
| }, |
| { |
| "epoch": 2.8936170212765955, |
| "grad_norm": 0.57421875, |
| "learning_rate": 1.217160974506332e-05, |
| "loss": 0.770456850528717, |
| "step": 1360 |
| }, |
| { |
| "epoch": 2.8978723404255318, |
| "grad_norm": 0.578125, |
| "learning_rate": 1.2158428608569563e-05, |
| "loss": 0.6479332447052002, |
| "step": 1362 |
| }, |
| { |
| "epoch": 2.902127659574468, |
| "grad_norm": 0.71484375, |
| "learning_rate": 1.2145772439871186e-05, |
| "loss": 0.5973125100135803, |
| "step": 1364 |
| }, |
| { |
| "epoch": 2.9063829787234043, |
| "grad_norm": 0.6484375, |
| "learning_rate": 1.2133641517644488e-05, |
| "loss": 0.5936951637268066, |
| "step": 1366 |
| }, |
| { |
| "epoch": 2.9106382978723406, |
| "grad_norm": 0.64453125, |
| "learning_rate": 1.2122036109000369e-05, |
| "loss": 0.5945168733596802, |
| "step": 1368 |
| }, |
| { |
| "epoch": 2.9148936170212765, |
| "grad_norm": 0.42578125, |
| "learning_rate": 1.2110956469478434e-05, |
| "loss": 0.799973726272583, |
| "step": 1370 |
| }, |
| { |
| "epoch": 2.9191489361702128, |
| "grad_norm": 0.4453125, |
| "learning_rate": 1.2100402843041378e-05, |
| "loss": 0.5132302641868591, |
| "step": 1372 |
| }, |
| { |
| "epoch": 2.923404255319149, |
| "grad_norm": 0.578125, |
| "learning_rate": 1.2090375462069602e-05, |
| "loss": 0.6212731599807739, |
| "step": 1374 |
| }, |
| { |
| "epoch": 2.927659574468085, |
| "grad_norm": 0.416015625, |
| "learning_rate": 1.208087454735611e-05, |
| "loss": 0.6145610809326172, |
| "step": 1376 |
| }, |
| { |
| "epoch": 2.931914893617021, |
| "grad_norm": 0.65234375, |
| "learning_rate": 1.2071900308101635e-05, |
| "loss": 0.7530128359794617, |
| "step": 1378 |
| }, |
| { |
| "epoch": 2.9361702127659575, |
| "grad_norm": 0.54296875, |
| "learning_rate": 1.2063452941910037e-05, |
| "loss": 0.5762054324150085, |
| "step": 1380 |
| }, |
| { |
| "epoch": 2.9404255319148938, |
| "grad_norm": 0.490234375, |
| "learning_rate": 1.205553263478396e-05, |
| "loss": 0.7723484039306641, |
| "step": 1382 |
| }, |
| { |
| "epoch": 2.94468085106383, |
| "grad_norm": 0.5234375, |
| "learning_rate": 1.2048139561120719e-05, |
| "loss": 0.6207926869392395, |
| "step": 1384 |
| }, |
| { |
| "epoch": 2.948936170212766, |
| "grad_norm": 0.58984375, |
| "learning_rate": 1.2041273883708483e-05, |
| "loss": 0.6677027940750122, |
| "step": 1386 |
| }, |
| { |
| "epoch": 2.953191489361702, |
| "grad_norm": 0.47265625, |
| "learning_rate": 1.2034935753722666e-05, |
| "loss": 0.7704895734786987, |
| "step": 1388 |
| }, |
| { |
| "epoch": 2.9574468085106385, |
| "grad_norm": 0.4453125, |
| "learning_rate": 1.2029125310722613e-05, |
| "loss": 0.7197574377059937, |
| "step": 1390 |
| }, |
| { |
| "epoch": 2.9617021276595743, |
| "grad_norm": 0.51953125, |
| "learning_rate": 1.202384268264853e-05, |
| "loss": 0.7283585667610168, |
| "step": 1392 |
| }, |
| { |
| "epoch": 2.9659574468085106, |
| "grad_norm": 0.5078125, |
| "learning_rate": 1.2019087985818655e-05, |
| "loss": 0.5656808018684387, |
| "step": 1394 |
| }, |
| { |
| "epoch": 2.970212765957447, |
| "grad_norm": 0.87109375, |
| "learning_rate": 1.2014861324926705e-05, |
| "loss": 0.49777036905288696, |
| "step": 1396 |
| }, |
| { |
| "epoch": 2.974468085106383, |
| "grad_norm": 0.61328125, |
| "learning_rate": 1.2011162793039564e-05, |
| "loss": 0.6693958640098572, |
| "step": 1398 |
| }, |
| { |
| "epoch": 2.978723404255319, |
| "grad_norm": 0.439453125, |
| "learning_rate": 1.2007992471595247e-05, |
| "loss": 0.5529562830924988, |
| "step": 1400 |
| }, |
| { |
| "epoch": 2.9829787234042553, |
| "grad_norm": 0.4375, |
| "learning_rate": 1.2005350430401092e-05, |
| "loss": 0.5693247318267822, |
| "step": 1402 |
| }, |
| { |
| "epoch": 2.9872340425531916, |
| "grad_norm": 0.3359375, |
| "learning_rate": 1.2003236727632224e-05, |
| "loss": 0.5829908847808838, |
| "step": 1404 |
| }, |
| { |
| "epoch": 2.9914893617021274, |
| "grad_norm": 0.484375, |
| "learning_rate": 1.2001651409830295e-05, |
| "loss": 0.6101734638214111, |
| "step": 1406 |
| }, |
| { |
| "epoch": 2.9957446808510637, |
| "grad_norm": 0.43359375, |
| "learning_rate": 1.2000594511902426e-05, |
| "loss": 0.6352876424789429, |
| "step": 1408 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.83203125, |
| "learning_rate": 1.2000066057120467e-05, |
| "loss": 0.3924168646335602, |
| "step": 1410 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 1410, |
| "total_flos": 4.1743170019314893e+18, |
| "train_loss": 0.9459603985573383, |
| "train_runtime": 10552.9203, |
| "train_samples_per_second": 4.276, |
| "train_steps_per_second": 0.134 |
| } |
| ], |
| "logging_steps": 2, |
| "max_steps": 1410, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 99999, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 4.1743170019314893e+18, |
| "train_batch_size": 2, |
| "trial_name": null, |
| "trial_params": null |
| } |