| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 3.0, |
| "eval_steps": 500, |
| "global_step": 2487, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0012062726176115801, |
| "grad_norm": 51.38860427847401, |
| "learning_rate": 2.0080321285140563e-07, |
| "loss": 11.2525, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0024125452352231603, |
| "grad_norm": 53.36026048366278, |
| "learning_rate": 4.0160642570281125e-07, |
| "loss": 11.2364, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0036188178528347406, |
| "grad_norm": 53.32175177566749, |
| "learning_rate": 6.024096385542169e-07, |
| "loss": 11.1112, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0048250904704463205, |
| "grad_norm": 54.3523155769173, |
| "learning_rate": 8.032128514056225e-07, |
| "loss": 11.017, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.006031363088057901, |
| "grad_norm": 53.85262766532482, |
| "learning_rate": 1.0040160642570282e-06, |
| "loss": 11.1188, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.007237635705669481, |
| "grad_norm": 54.13262202877557, |
| "learning_rate": 1.2048192771084338e-06, |
| "loss": 11.0221, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.008443908323281062, |
| "grad_norm": 56.227774935885684, |
| "learning_rate": 1.4056224899598394e-06, |
| "loss": 10.8553, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.009650180940892641, |
| "grad_norm": 59.35662182504017, |
| "learning_rate": 1.606425702811245e-06, |
| "loss": 10.7837, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.010856453558504222, |
| "grad_norm": 58.81276787565515, |
| "learning_rate": 1.8072289156626506e-06, |
| "loss": 10.7875, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.012062726176115802, |
| "grad_norm": 60.2995135454209, |
| "learning_rate": 2.0080321285140564e-06, |
| "loss": 10.7267, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.013268998793727383, |
| "grad_norm": 80.16682488222419, |
| "learning_rate": 2.208835341365462e-06, |
| "loss": 9.5366, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.014475271411338963, |
| "grad_norm": 80.01557833575397, |
| "learning_rate": 2.4096385542168676e-06, |
| "loss": 9.5007, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.015681544028950542, |
| "grad_norm": 101.46955180189386, |
| "learning_rate": 2.610441767068273e-06, |
| "loss": 8.905, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.016887816646562123, |
| "grad_norm": 81.57781816299472, |
| "learning_rate": 2.811244979919679e-06, |
| "loss": 4.339, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.018094089264173704, |
| "grad_norm": 64.68209625020934, |
| "learning_rate": 3.0120481927710846e-06, |
| "loss": 3.6619, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.019300361881785282, |
| "grad_norm": 56.85869610525475, |
| "learning_rate": 3.21285140562249e-06, |
| "loss": 3.4092, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.020506634499396863, |
| "grad_norm": 40.39884604434922, |
| "learning_rate": 3.413654618473896e-06, |
| "loss": 2.7495, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.021712907117008445, |
| "grad_norm": 33.48382966020048, |
| "learning_rate": 3.614457831325301e-06, |
| "loss": 2.5099, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.022919179734620022, |
| "grad_norm": 9.068825320578902, |
| "learning_rate": 3.8152610441767074e-06, |
| "loss": 1.5497, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.024125452352231604, |
| "grad_norm": 5.430810054078272, |
| "learning_rate": 4.016064257028113e-06, |
| "loss": 1.3625, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.025331724969843185, |
| "grad_norm": 4.369877876698654, |
| "learning_rate": 4.216867469879518e-06, |
| "loss": 1.2633, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.026537997587454766, |
| "grad_norm": 3.6966719303916666, |
| "learning_rate": 4.417670682730924e-06, |
| "loss": 1.2189, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.027744270205066344, |
| "grad_norm": 2.7012172428180246, |
| "learning_rate": 4.618473895582329e-06, |
| "loss": 1.127, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.028950542822677925, |
| "grad_norm": 2.1966628156071977, |
| "learning_rate": 4.819277108433735e-06, |
| "loss": 1.0832, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.030156815440289506, |
| "grad_norm": 1.9505800224070988, |
| "learning_rate": 5.020080321285141e-06, |
| "loss": 1.0821, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.031363088057901084, |
| "grad_norm": 1.4033827272745318, |
| "learning_rate": 5.220883534136546e-06, |
| "loss": 0.9829, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.032569360675512665, |
| "grad_norm": 25.83482630979627, |
| "learning_rate": 5.421686746987952e-06, |
| "loss": 0.9409, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.033775633293124246, |
| "grad_norm": 8.816041565642406, |
| "learning_rate": 5.622489959839358e-06, |
| "loss": 0.9217, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.03498190591073583, |
| "grad_norm": 1.865924593494002, |
| "learning_rate": 5.823293172690764e-06, |
| "loss": 0.9477, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.03618817852834741, |
| "grad_norm": 1.185561066111754, |
| "learning_rate": 6.024096385542169e-06, |
| "loss": 0.9124, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.03739445114595899, |
| "grad_norm": 1.0072345862575207, |
| "learning_rate": 6.224899598393575e-06, |
| "loss": 0.8344, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.038600723763570564, |
| "grad_norm": 0.8930661954864261, |
| "learning_rate": 6.42570281124498e-06, |
| "loss": 0.802, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.039806996381182146, |
| "grad_norm": 0.8218534816604156, |
| "learning_rate": 6.626506024096386e-06, |
| "loss": 0.8519, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.04101326899879373, |
| "grad_norm": 0.7409176841066717, |
| "learning_rate": 6.827309236947792e-06, |
| "loss": 0.8252, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.04221954161640531, |
| "grad_norm": 0.694890954906707, |
| "learning_rate": 7.028112449799197e-06, |
| "loss": 0.7267, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.04342581423401689, |
| "grad_norm": 1.0238217234534817, |
| "learning_rate": 7.228915662650602e-06, |
| "loss": 0.8264, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.04463208685162847, |
| "grad_norm": 0.6077731225781826, |
| "learning_rate": 7.429718875502008e-06, |
| "loss": 0.7901, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.045838359469240045, |
| "grad_norm": 0.6066967650950137, |
| "learning_rate": 7.630522088353415e-06, |
| "loss": 0.7383, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.047044632086851626, |
| "grad_norm": 0.6209543801705237, |
| "learning_rate": 7.83132530120482e-06, |
| "loss": 0.7057, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.04825090470446321, |
| "grad_norm": 0.5825557752671116, |
| "learning_rate": 8.032128514056226e-06, |
| "loss": 0.6934, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.04945717732207479, |
| "grad_norm": 0.5769538262434953, |
| "learning_rate": 8.232931726907631e-06, |
| "loss": 0.7009, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.05066344993968637, |
| "grad_norm": 0.5003790590362588, |
| "learning_rate": 8.433734939759036e-06, |
| "loss": 0.6833, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.05186972255729795, |
| "grad_norm": 0.5156286849928355, |
| "learning_rate": 8.634538152610444e-06, |
| "loss": 0.7108, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.05307599517490953, |
| "grad_norm": 0.5113020018822674, |
| "learning_rate": 8.835341365461847e-06, |
| "loss": 0.6978, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.054282267792521106, |
| "grad_norm": 0.49956584530074893, |
| "learning_rate": 9.036144578313253e-06, |
| "loss": 0.6553, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.05548854041013269, |
| "grad_norm": 0.488845369844212, |
| "learning_rate": 9.236947791164658e-06, |
| "loss": 0.6637, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.05669481302774427, |
| "grad_norm": 0.4479235016367263, |
| "learning_rate": 9.437751004016063e-06, |
| "loss": 0.6509, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.05790108564535585, |
| "grad_norm": 0.4407343454865804, |
| "learning_rate": 9.63855421686747e-06, |
| "loss": 0.6484, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.05910735826296743, |
| "grad_norm": 0.4661813735492102, |
| "learning_rate": 9.839357429718876e-06, |
| "loss": 0.6106, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.06031363088057901, |
| "grad_norm": 0.4688851051826549, |
| "learning_rate": 1.0040160642570281e-05, |
| "loss": 0.6536, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.061519903498190594, |
| "grad_norm": 0.44874471772574015, |
| "learning_rate": 1.0240963855421687e-05, |
| "loss": 0.6356, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.06272617611580217, |
| "grad_norm": 0.4205764980483692, |
| "learning_rate": 1.0441767068273092e-05, |
| "loss": 0.6395, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.06393244873341375, |
| "grad_norm": 0.3773662466705985, |
| "learning_rate": 1.0642570281124499e-05, |
| "loss": 0.6105, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.06513872135102533, |
| "grad_norm": 0.36862909221937795, |
| "learning_rate": 1.0843373493975904e-05, |
| "loss": 0.6087, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.06634499396863691, |
| "grad_norm": 0.4535482567802974, |
| "learning_rate": 1.104417670682731e-05, |
| "loss": 0.6261, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.06755126658624849, |
| "grad_norm": 0.35536767227242044, |
| "learning_rate": 1.1244979919678715e-05, |
| "loss": 0.6005, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.06875753920386007, |
| "grad_norm": 0.3424177957048031, |
| "learning_rate": 1.144578313253012e-05, |
| "loss": 0.6432, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.06996381182147166, |
| "grad_norm": 0.3701788632145171, |
| "learning_rate": 1.1646586345381528e-05, |
| "loss": 0.6008, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.07117008443908324, |
| "grad_norm": 0.3101793503776111, |
| "learning_rate": 1.1847389558232933e-05, |
| "loss": 0.6143, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.07237635705669482, |
| "grad_norm": 0.3001818267393327, |
| "learning_rate": 1.2048192771084338e-05, |
| "loss": 0.6043, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.0735826296743064, |
| "grad_norm": 0.30001188823098984, |
| "learning_rate": 1.2248995983935744e-05, |
| "loss": 0.5812, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.07478890229191798, |
| "grad_norm": 0.3287508814851007, |
| "learning_rate": 1.244979919678715e-05, |
| "loss": 0.6174, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.07599517490952955, |
| "grad_norm": 0.31978551599047533, |
| "learning_rate": 1.2650602409638555e-05, |
| "loss": 0.6056, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.07720144752714113, |
| "grad_norm": 0.3117661373695458, |
| "learning_rate": 1.285140562248996e-05, |
| "loss": 0.5736, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.07840772014475271, |
| "grad_norm": 0.32162182570694564, |
| "learning_rate": 1.3052208835341367e-05, |
| "loss": 0.6024, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.07961399276236429, |
| "grad_norm": 0.3119082159220764, |
| "learning_rate": 1.3253012048192772e-05, |
| "loss": 0.5884, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.08082026537997587, |
| "grad_norm": 0.3754051455489471, |
| "learning_rate": 1.3453815261044178e-05, |
| "loss": 0.6004, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.08202653799758745, |
| "grad_norm": 0.3109694023561302, |
| "learning_rate": 1.3654618473895583e-05, |
| "loss": 0.5617, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.08323281061519903, |
| "grad_norm": 0.28664605601137993, |
| "learning_rate": 1.3855421686746989e-05, |
| "loss": 0.5829, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.08443908323281062, |
| "grad_norm": 0.31651626321139076, |
| "learning_rate": 1.4056224899598394e-05, |
| "loss": 0.5846, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.0856453558504222, |
| "grad_norm": 0.28296350697482814, |
| "learning_rate": 1.42570281124498e-05, |
| "loss": 0.5715, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.08685162846803378, |
| "grad_norm": 0.3168399355296006, |
| "learning_rate": 1.4457831325301205e-05, |
| "loss": 0.5613, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.08805790108564536, |
| "grad_norm": 0.2955763208938298, |
| "learning_rate": 1.465863453815261e-05, |
| "loss": 0.5761, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.08926417370325694, |
| "grad_norm": 0.2660115535346986, |
| "learning_rate": 1.4859437751004016e-05, |
| "loss": 0.5814, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.09047044632086852, |
| "grad_norm": 0.31092486179587725, |
| "learning_rate": 1.5060240963855424e-05, |
| "loss": 0.5901, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.09167671893848009, |
| "grad_norm": 0.2672347270128979, |
| "learning_rate": 1.526104417670683e-05, |
| "loss": 0.5715, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.09288299155609167, |
| "grad_norm": 0.31255025482262866, |
| "learning_rate": 1.5461847389558235e-05, |
| "loss": 0.5856, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.09408926417370325, |
| "grad_norm": 0.2981092204177073, |
| "learning_rate": 1.566265060240964e-05, |
| "loss": 0.5938, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.09529553679131483, |
| "grad_norm": 0.2901487206816295, |
| "learning_rate": 1.5863453815261046e-05, |
| "loss": 0.5699, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.09650180940892641, |
| "grad_norm": 0.3242024566962533, |
| "learning_rate": 1.606425702811245e-05, |
| "loss": 0.5648, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.097708082026538, |
| "grad_norm": 0.2901732345581997, |
| "learning_rate": 1.6265060240963857e-05, |
| "loss": 0.5567, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.09891435464414958, |
| "grad_norm": 0.30430875830826826, |
| "learning_rate": 1.6465863453815262e-05, |
| "loss": 0.5482, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.10012062726176116, |
| "grad_norm": 0.2693405288007236, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.533, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.10132689987937274, |
| "grad_norm": 0.3226079166873796, |
| "learning_rate": 1.6867469879518073e-05, |
| "loss": 0.5636, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.10253317249698432, |
| "grad_norm": 0.28557229465998485, |
| "learning_rate": 1.706827309236948e-05, |
| "loss": 0.5521, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.1037394451145959, |
| "grad_norm": 0.3137906150959221, |
| "learning_rate": 1.7269076305220887e-05, |
| "loss": 0.5475, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.10494571773220748, |
| "grad_norm": 0.2814428939492701, |
| "learning_rate": 1.7469879518072292e-05, |
| "loss": 0.5297, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.10615199034981906, |
| "grad_norm": 0.27927754569488406, |
| "learning_rate": 1.7670682730923694e-05, |
| "loss": 0.5571, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.10735826296743065, |
| "grad_norm": 0.29699094559241995, |
| "learning_rate": 1.78714859437751e-05, |
| "loss": 0.5546, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.10856453558504221, |
| "grad_norm": 0.30506480963814275, |
| "learning_rate": 1.8072289156626505e-05, |
| "loss": 0.5442, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.1097708082026538, |
| "grad_norm": 0.29804871816448125, |
| "learning_rate": 1.827309236947791e-05, |
| "loss": 0.537, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.11097708082026538, |
| "grad_norm": 0.3356618966578361, |
| "learning_rate": 1.8473895582329316e-05, |
| "loss": 0.5487, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.11218335343787696, |
| "grad_norm": 0.29121306377344314, |
| "learning_rate": 1.867469879518072e-05, |
| "loss": 0.5313, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.11338962605548854, |
| "grad_norm": 0.3004619917101442, |
| "learning_rate": 1.8875502008032127e-05, |
| "loss": 0.5113, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.11459589867310012, |
| "grad_norm": 0.30776858982546973, |
| "learning_rate": 1.9076305220883535e-05, |
| "loss": 0.549, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.1158021712907117, |
| "grad_norm": 0.2988781536907027, |
| "learning_rate": 1.927710843373494e-05, |
| "loss": 0.5426, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.11700844390832328, |
| "grad_norm": 0.306483722050362, |
| "learning_rate": 1.9477911646586346e-05, |
| "loss": 0.5325, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.11821471652593486, |
| "grad_norm": 0.3321830088641836, |
| "learning_rate": 1.967871485943775e-05, |
| "loss": 0.5573, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.11942098914354644, |
| "grad_norm": 0.30241064255057876, |
| "learning_rate": 1.9879518072289157e-05, |
| "loss": 0.5362, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.12062726176115803, |
| "grad_norm": 0.3058839687307177, |
| "learning_rate": 2.0080321285140562e-05, |
| "loss": 0.5259, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.1218335343787696, |
| "grad_norm": 0.3495388835277465, |
| "learning_rate": 2.0281124497991968e-05, |
| "loss": 0.535, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.12303980699638119, |
| "grad_norm": 0.2959606492457292, |
| "learning_rate": 2.0481927710843373e-05, |
| "loss": 0.5246, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.12424607961399277, |
| "grad_norm": 0.39290570723266705, |
| "learning_rate": 2.068273092369478e-05, |
| "loss": 0.5455, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.12545235223160434, |
| "grad_norm": 0.2926526545122298, |
| "learning_rate": 2.0883534136546184e-05, |
| "loss": 0.5248, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.12665862484921592, |
| "grad_norm": 0.35016768106063734, |
| "learning_rate": 2.1084337349397593e-05, |
| "loss": 0.5269, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.1278648974668275, |
| "grad_norm": 0.3699271965637879, |
| "learning_rate": 2.1285140562248998e-05, |
| "loss": 0.5615, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.12907117008443908, |
| "grad_norm": 0.31020871716617304, |
| "learning_rate": 2.1485943775100404e-05, |
| "loss": 0.532, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.13027744270205066, |
| "grad_norm": 0.2939373901082531, |
| "learning_rate": 2.168674698795181e-05, |
| "loss": 0.5288, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.13148371531966224, |
| "grad_norm": 0.33537715712157723, |
| "learning_rate": 2.1887550200803214e-05, |
| "loss": 0.5386, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.13268998793727382, |
| "grad_norm": 0.30785931040019476, |
| "learning_rate": 2.208835341365462e-05, |
| "loss": 0.5138, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.1338962605548854, |
| "grad_norm": 0.3962283608203197, |
| "learning_rate": 2.2289156626506025e-05, |
| "loss": 0.5295, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.13510253317249699, |
| "grad_norm": 0.3089933224294988, |
| "learning_rate": 2.248995983935743e-05, |
| "loss": 0.5313, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.13630880579010857, |
| "grad_norm": 0.3401641479121955, |
| "learning_rate": 2.2690763052208836e-05, |
| "loss": 0.5454, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.13751507840772015, |
| "grad_norm": 0.4083258381690134, |
| "learning_rate": 2.289156626506024e-05, |
| "loss": 0.5447, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.13872135102533173, |
| "grad_norm": 0.3268362769014123, |
| "learning_rate": 2.309236947791165e-05, |
| "loss": 0.5139, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.1399276236429433, |
| "grad_norm": 0.36409033931151674, |
| "learning_rate": 2.3293172690763055e-05, |
| "loss": 0.5043, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.1411338962605549, |
| "grad_norm": 0.3333308518450483, |
| "learning_rate": 2.349397590361446e-05, |
| "loss": 0.5149, |
| "step": 117 |
| }, |
| { |
| "epoch": 0.14234016887816647, |
| "grad_norm": 0.31714621122727005, |
| "learning_rate": 2.3694779116465866e-05, |
| "loss": 0.5352, |
| "step": 118 |
| }, |
| { |
| "epoch": 0.14354644149577805, |
| "grad_norm": 0.38311242859384087, |
| "learning_rate": 2.389558232931727e-05, |
| "loss": 0.4967, |
| "step": 119 |
| }, |
| { |
| "epoch": 0.14475271411338964, |
| "grad_norm": 0.3379386762440851, |
| "learning_rate": 2.4096385542168677e-05, |
| "loss": 0.4944, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.14595898673100122, |
| "grad_norm": 0.38722007899872013, |
| "learning_rate": 2.4297188755020082e-05, |
| "loss": 0.5149, |
| "step": 121 |
| }, |
| { |
| "epoch": 0.1471652593486128, |
| "grad_norm": 0.3791244486209898, |
| "learning_rate": 2.4497991967871488e-05, |
| "loss": 0.5064, |
| "step": 122 |
| }, |
| { |
| "epoch": 0.14837153196622438, |
| "grad_norm": 0.3936057186514771, |
| "learning_rate": 2.4698795180722893e-05, |
| "loss": 0.5461, |
| "step": 123 |
| }, |
| { |
| "epoch": 0.14957780458383596, |
| "grad_norm": 0.4127497762279216, |
| "learning_rate": 2.48995983935743e-05, |
| "loss": 0.5356, |
| "step": 124 |
| }, |
| { |
| "epoch": 0.15078407720144751, |
| "grad_norm": 0.37304035188820656, |
| "learning_rate": 2.5100401606425704e-05, |
| "loss": 0.5357, |
| "step": 125 |
| }, |
| { |
| "epoch": 0.1519903498190591, |
| "grad_norm": 0.35071128415816427, |
| "learning_rate": 2.530120481927711e-05, |
| "loss": 0.5145, |
| "step": 126 |
| }, |
| { |
| "epoch": 0.15319662243667068, |
| "grad_norm": 0.3309060548291426, |
| "learning_rate": 2.5502008032128515e-05, |
| "loss": 0.4809, |
| "step": 127 |
| }, |
| { |
| "epoch": 0.15440289505428226, |
| "grad_norm": 0.3584753468597298, |
| "learning_rate": 2.570281124497992e-05, |
| "loss": 0.5187, |
| "step": 128 |
| }, |
| { |
| "epoch": 0.15560916767189384, |
| "grad_norm": 0.3895056053670569, |
| "learning_rate": 2.5903614457831325e-05, |
| "loss": 0.5319, |
| "step": 129 |
| }, |
| { |
| "epoch": 0.15681544028950542, |
| "grad_norm": 0.30890760385437577, |
| "learning_rate": 2.6104417670682734e-05, |
| "loss": 0.5046, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.158021712907117, |
| "grad_norm": 0.43680103931374326, |
| "learning_rate": 2.6305220883534136e-05, |
| "loss": 0.5127, |
| "step": 131 |
| }, |
| { |
| "epoch": 0.15922798552472858, |
| "grad_norm": 0.36126521977202153, |
| "learning_rate": 2.6506024096385545e-05, |
| "loss": 0.5057, |
| "step": 132 |
| }, |
| { |
| "epoch": 0.16043425814234016, |
| "grad_norm": 0.35586513761227634, |
| "learning_rate": 2.6706827309236947e-05, |
| "loss": 0.5197, |
| "step": 133 |
| }, |
| { |
| "epoch": 0.16164053075995174, |
| "grad_norm": 0.4173616426138242, |
| "learning_rate": 2.6907630522088356e-05, |
| "loss": 0.5262, |
| "step": 134 |
| }, |
| { |
| "epoch": 0.16284680337756333, |
| "grad_norm": 0.39991088553188797, |
| "learning_rate": 2.7108433734939758e-05, |
| "loss": 0.5202, |
| "step": 135 |
| }, |
| { |
| "epoch": 0.1640530759951749, |
| "grad_norm": 0.42015480982805903, |
| "learning_rate": 2.7309236947791167e-05, |
| "loss": 0.5122, |
| "step": 136 |
| }, |
| { |
| "epoch": 0.1652593486127865, |
| "grad_norm": 0.3943371071406799, |
| "learning_rate": 2.751004016064257e-05, |
| "loss": 0.5095, |
| "step": 137 |
| }, |
| { |
| "epoch": 0.16646562123039807, |
| "grad_norm": 0.35815925986601993, |
| "learning_rate": 2.7710843373493977e-05, |
| "loss": 0.4785, |
| "step": 138 |
| }, |
| { |
| "epoch": 0.16767189384800965, |
| "grad_norm": 0.3636720740754509, |
| "learning_rate": 2.791164658634538e-05, |
| "loss": 0.491, |
| "step": 139 |
| }, |
| { |
| "epoch": 0.16887816646562123, |
| "grad_norm": 0.36511051463449823, |
| "learning_rate": 2.8112449799196788e-05, |
| "loss": 0.4983, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.1700844390832328, |
| "grad_norm": 0.39354315214120617, |
| "learning_rate": 2.8313253012048197e-05, |
| "loss": 0.499, |
| "step": 141 |
| }, |
| { |
| "epoch": 0.1712907117008444, |
| "grad_norm": 0.3101636125632076, |
| "learning_rate": 2.85140562248996e-05, |
| "loss": 0.4863, |
| "step": 142 |
| }, |
| { |
| "epoch": 0.17249698431845598, |
| "grad_norm": 0.41813155310935585, |
| "learning_rate": 2.8714859437751008e-05, |
| "loss": 0.5134, |
| "step": 143 |
| }, |
| { |
| "epoch": 0.17370325693606756, |
| "grad_norm": 0.437321744526369, |
| "learning_rate": 2.891566265060241e-05, |
| "loss": 0.5083, |
| "step": 144 |
| }, |
| { |
| "epoch": 0.17490952955367914, |
| "grad_norm": 0.32546457563741027, |
| "learning_rate": 2.911646586345382e-05, |
| "loss": 0.4988, |
| "step": 145 |
| }, |
| { |
| "epoch": 0.17611580217129072, |
| "grad_norm": 0.45406780502076305, |
| "learning_rate": 2.931726907630522e-05, |
| "loss": 0.5034, |
| "step": 146 |
| }, |
| { |
| "epoch": 0.1773220747889023, |
| "grad_norm": 0.4279546537128257, |
| "learning_rate": 2.951807228915663e-05, |
| "loss": 0.4901, |
| "step": 147 |
| }, |
| { |
| "epoch": 0.17852834740651388, |
| "grad_norm": 0.42676213380125827, |
| "learning_rate": 2.971887550200803e-05, |
| "loss": 0.5164, |
| "step": 148 |
| }, |
| { |
| "epoch": 0.17973462002412546, |
| "grad_norm": 0.5195748580254218, |
| "learning_rate": 2.991967871485944e-05, |
| "loss": 0.4891, |
| "step": 149 |
| }, |
| { |
| "epoch": 0.18094089264173704, |
| "grad_norm": 0.4208106819963618, |
| "learning_rate": 3.012048192771085e-05, |
| "loss": 0.4932, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.18214716525934863, |
| "grad_norm": 0.3494159427966441, |
| "learning_rate": 3.032128514056225e-05, |
| "loss": 0.4823, |
| "step": 151 |
| }, |
| { |
| "epoch": 0.18335343787696018, |
| "grad_norm": 0.3823494518546758, |
| "learning_rate": 3.052208835341366e-05, |
| "loss": 0.4897, |
| "step": 152 |
| }, |
| { |
| "epoch": 0.18455971049457176, |
| "grad_norm": 0.4115543529305647, |
| "learning_rate": 3.072289156626506e-05, |
| "loss": 0.4991, |
| "step": 153 |
| }, |
| { |
| "epoch": 0.18576598311218334, |
| "grad_norm": 0.3958327481485804, |
| "learning_rate": 3.092369477911647e-05, |
| "loss": 0.5016, |
| "step": 154 |
| }, |
| { |
| "epoch": 0.18697225572979492, |
| "grad_norm": 0.39504172903294904, |
| "learning_rate": 3.112449799196787e-05, |
| "loss": 0.4892, |
| "step": 155 |
| }, |
| { |
| "epoch": 0.1881785283474065, |
| "grad_norm": 0.41868991663521743, |
| "learning_rate": 3.132530120481928e-05, |
| "loss": 0.5055, |
| "step": 156 |
| }, |
| { |
| "epoch": 0.18938480096501809, |
| "grad_norm": 0.3515111350200484, |
| "learning_rate": 3.152610441767068e-05, |
| "loss": 0.497, |
| "step": 157 |
| }, |
| { |
| "epoch": 0.19059107358262967, |
| "grad_norm": 0.3997733425198974, |
| "learning_rate": 3.172690763052209e-05, |
| "loss": 0.4958, |
| "step": 158 |
| }, |
| { |
| "epoch": 0.19179734620024125, |
| "grad_norm": 0.4525184693608031, |
| "learning_rate": 3.192771084337349e-05, |
| "loss": 0.5212, |
| "step": 159 |
| }, |
| { |
| "epoch": 0.19300361881785283, |
| "grad_norm": 0.3902705819333036, |
| "learning_rate": 3.21285140562249e-05, |
| "loss": 0.5178, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1942098914354644, |
| "grad_norm": 0.5030587307913869, |
| "learning_rate": 3.232931726907631e-05, |
| "loss": 0.5108, |
| "step": 161 |
| }, |
| { |
| "epoch": 0.195416164053076, |
| "grad_norm": 0.3624196375454655, |
| "learning_rate": 3.253012048192771e-05, |
| "loss": 0.5097, |
| "step": 162 |
| }, |
| { |
| "epoch": 0.19662243667068757, |
| "grad_norm": 0.5221595299517153, |
| "learning_rate": 3.273092369477912e-05, |
| "loss": 0.5065, |
| "step": 163 |
| }, |
| { |
| "epoch": 0.19782870928829915, |
| "grad_norm": 0.40528581225043786, |
| "learning_rate": 3.2931726907630524e-05, |
| "loss": 0.4954, |
| "step": 164 |
| }, |
| { |
| "epoch": 0.19903498190591074, |
| "grad_norm": 0.4115164956369205, |
| "learning_rate": 3.313253012048193e-05, |
| "loss": 0.4924, |
| "step": 165 |
| }, |
| { |
| "epoch": 0.20024125452352232, |
| "grad_norm": 0.48390541674694987, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 0.511, |
| "step": 166 |
| }, |
| { |
| "epoch": 0.2014475271411339, |
| "grad_norm": 0.4912910650075616, |
| "learning_rate": 3.353413654618474e-05, |
| "loss": 0.5156, |
| "step": 167 |
| }, |
| { |
| "epoch": 0.20265379975874548, |
| "grad_norm": 0.45223965087989415, |
| "learning_rate": 3.3734939759036146e-05, |
| "loss": 0.4873, |
| "step": 168 |
| }, |
| { |
| "epoch": 0.20386007237635706, |
| "grad_norm": 0.4462359140749097, |
| "learning_rate": 3.393574297188755e-05, |
| "loss": 0.4808, |
| "step": 169 |
| }, |
| { |
| "epoch": 0.20506634499396864, |
| "grad_norm": 0.45946545348291873, |
| "learning_rate": 3.413654618473896e-05, |
| "loss": 0.5084, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.20627261761158022, |
| "grad_norm": 0.4869113169396108, |
| "learning_rate": 3.433734939759036e-05, |
| "loss": 0.5226, |
| "step": 171 |
| }, |
| { |
| "epoch": 0.2074788902291918, |
| "grad_norm": 0.4529756289005346, |
| "learning_rate": 3.4538152610441774e-05, |
| "loss": 0.4973, |
| "step": 172 |
| }, |
| { |
| "epoch": 0.20868516284680338, |
| "grad_norm": 0.4143744137360863, |
| "learning_rate": 3.473895582329317e-05, |
| "loss": 0.508, |
| "step": 173 |
| }, |
| { |
| "epoch": 0.20989143546441497, |
| "grad_norm": 0.4351511961848573, |
| "learning_rate": 3.4939759036144585e-05, |
| "loss": 0.4734, |
| "step": 174 |
| }, |
| { |
| "epoch": 0.21109770808202655, |
| "grad_norm": 0.357771198622376, |
| "learning_rate": 3.5140562248995983e-05, |
| "loss": 0.5004, |
| "step": 175 |
| }, |
| { |
| "epoch": 0.21230398069963813, |
| "grad_norm": 0.40605221616071163, |
| "learning_rate": 3.534136546184739e-05, |
| "loss": 0.4883, |
| "step": 176 |
| }, |
| { |
| "epoch": 0.2135102533172497, |
| "grad_norm": 0.4667261994786403, |
| "learning_rate": 3.5542168674698794e-05, |
| "loss": 0.4899, |
| "step": 177 |
| }, |
| { |
| "epoch": 0.2147165259348613, |
| "grad_norm": 0.3282260272132664, |
| "learning_rate": 3.57429718875502e-05, |
| "loss": 0.4847, |
| "step": 178 |
| }, |
| { |
| "epoch": 0.21592279855247287, |
| "grad_norm": 0.45132796886106885, |
| "learning_rate": 3.5943775100401605e-05, |
| "loss": 0.4853, |
| "step": 179 |
| }, |
| { |
| "epoch": 0.21712907117008443, |
| "grad_norm": 0.37769190784059187, |
| "learning_rate": 3.614457831325301e-05, |
| "loss": 0.4793, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.218335343787696, |
| "grad_norm": 0.4105355197529797, |
| "learning_rate": 3.634538152610442e-05, |
| "loss": 0.4969, |
| "step": 181 |
| }, |
| { |
| "epoch": 0.2195416164053076, |
| "grad_norm": 0.4886523984077889, |
| "learning_rate": 3.654618473895582e-05, |
| "loss": 0.5004, |
| "step": 182 |
| }, |
| { |
| "epoch": 0.22074788902291917, |
| "grad_norm": 0.34526155199172764, |
| "learning_rate": 3.674698795180723e-05, |
| "loss": 0.5041, |
| "step": 183 |
| }, |
| { |
| "epoch": 0.22195416164053075, |
| "grad_norm": 0.536078133653887, |
| "learning_rate": 3.694779116465863e-05, |
| "loss": 0.4884, |
| "step": 184 |
| }, |
| { |
| "epoch": 0.22316043425814233, |
| "grad_norm": 0.41213708136617055, |
| "learning_rate": 3.7148594377510044e-05, |
| "loss": 0.4844, |
| "step": 185 |
| }, |
| { |
| "epoch": 0.2243667068757539, |
| "grad_norm": 0.42042027820779376, |
| "learning_rate": 3.734939759036144e-05, |
| "loss": 0.4894, |
| "step": 186 |
| }, |
| { |
| "epoch": 0.2255729794933655, |
| "grad_norm": 0.4622195360104783, |
| "learning_rate": 3.7550200803212855e-05, |
| "loss": 0.5162, |
| "step": 187 |
| }, |
| { |
| "epoch": 0.22677925211097708, |
| "grad_norm": 0.4754012578138651, |
| "learning_rate": 3.7751004016064253e-05, |
| "loss": 0.4942, |
| "step": 188 |
| }, |
| { |
| "epoch": 0.22798552472858866, |
| "grad_norm": 0.5123600834395359, |
| "learning_rate": 3.7951807228915666e-05, |
| "loss": 0.5037, |
| "step": 189 |
| }, |
| { |
| "epoch": 0.22919179734620024, |
| "grad_norm": 0.4950795192470773, |
| "learning_rate": 3.815261044176707e-05, |
| "loss": 0.5062, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.23039806996381182, |
| "grad_norm": 0.5675637905005346, |
| "learning_rate": 3.8353413654618476e-05, |
| "loss": 0.4962, |
| "step": 191 |
| }, |
| { |
| "epoch": 0.2316043425814234, |
| "grad_norm": 0.46625154792702506, |
| "learning_rate": 3.855421686746988e-05, |
| "loss": 0.4899, |
| "step": 192 |
| }, |
| { |
| "epoch": 0.23281061519903498, |
| "grad_norm": 0.5285632109360143, |
| "learning_rate": 3.875502008032129e-05, |
| "loss": 0.471, |
| "step": 193 |
| }, |
| { |
| "epoch": 0.23401688781664656, |
| "grad_norm": 0.7098580123670813, |
| "learning_rate": 3.895582329317269e-05, |
| "loss": 0.4902, |
| "step": 194 |
| }, |
| { |
| "epoch": 0.23522316043425814, |
| "grad_norm": 0.46566792189515693, |
| "learning_rate": 3.91566265060241e-05, |
| "loss": 0.4639, |
| "step": 195 |
| }, |
| { |
| "epoch": 0.23642943305186973, |
| "grad_norm": 0.48790846789133946, |
| "learning_rate": 3.93574297188755e-05, |
| "loss": 0.4687, |
| "step": 196 |
| }, |
| { |
| "epoch": 0.2376357056694813, |
| "grad_norm": 0.6616199623175774, |
| "learning_rate": 3.955823293172691e-05, |
| "loss": 0.4896, |
| "step": 197 |
| }, |
| { |
| "epoch": 0.2388419782870929, |
| "grad_norm": 0.6597217338301457, |
| "learning_rate": 3.9759036144578314e-05, |
| "loss": 0.5305, |
| "step": 198 |
| }, |
| { |
| "epoch": 0.24004825090470447, |
| "grad_norm": 0.40911035138284585, |
| "learning_rate": 3.995983935742972e-05, |
| "loss": 0.4845, |
| "step": 199 |
| }, |
| { |
| "epoch": 0.24125452352231605, |
| "grad_norm": 0.41051379447376024, |
| "learning_rate": 4.0160642570281125e-05, |
| "loss": 0.4943, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.24246079613992763, |
| "grad_norm": 0.4856554636844061, |
| "learning_rate": 4.036144578313254e-05, |
| "loss": 0.4589, |
| "step": 201 |
| }, |
| { |
| "epoch": 0.2436670687575392, |
| "grad_norm": 0.46595255361079024, |
| "learning_rate": 4.0562248995983936e-05, |
| "loss": 0.4901, |
| "step": 202 |
| }, |
| { |
| "epoch": 0.2448733413751508, |
| "grad_norm": 0.4817145604840749, |
| "learning_rate": 4.076305220883535e-05, |
| "loss": 0.4838, |
| "step": 203 |
| }, |
| { |
| "epoch": 0.24607961399276237, |
| "grad_norm": 0.4386965552097178, |
| "learning_rate": 4.0963855421686746e-05, |
| "loss": 0.4851, |
| "step": 204 |
| }, |
| { |
| "epoch": 0.24728588661037396, |
| "grad_norm": 0.4520118344205907, |
| "learning_rate": 4.116465863453816e-05, |
| "loss": 0.4983, |
| "step": 205 |
| }, |
| { |
| "epoch": 0.24849215922798554, |
| "grad_norm": 0.3614677524759272, |
| "learning_rate": 4.136546184738956e-05, |
| "loss": 0.4679, |
| "step": 206 |
| }, |
| { |
| "epoch": 0.24969843184559712, |
| "grad_norm": 0.49375547406597686, |
| "learning_rate": 4.156626506024097e-05, |
| "loss": 0.4667, |
| "step": 207 |
| }, |
| { |
| "epoch": 0.25090470446320867, |
| "grad_norm": 0.38672197120064633, |
| "learning_rate": 4.176706827309237e-05, |
| "loss": 0.4725, |
| "step": 208 |
| }, |
| { |
| "epoch": 0.25211097708082025, |
| "grad_norm": 0.5147301761091354, |
| "learning_rate": 4.196787148594378e-05, |
| "loss": 0.4781, |
| "step": 209 |
| }, |
| { |
| "epoch": 0.25331724969843183, |
| "grad_norm": 0.3812166626819943, |
| "learning_rate": 4.2168674698795186e-05, |
| "loss": 0.4805, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.2545235223160434, |
| "grad_norm": 0.4545562630855821, |
| "learning_rate": 4.236947791164659e-05, |
| "loss": 0.4762, |
| "step": 211 |
| }, |
| { |
| "epoch": 0.255729794933655, |
| "grad_norm": 0.436269716338551, |
| "learning_rate": 4.2570281124497996e-05, |
| "loss": 0.4797, |
| "step": 212 |
| }, |
| { |
| "epoch": 0.2569360675512666, |
| "grad_norm": 0.5384572866348979, |
| "learning_rate": 4.27710843373494e-05, |
| "loss": 0.4999, |
| "step": 213 |
| }, |
| { |
| "epoch": 0.25814234016887816, |
| "grad_norm": 0.5404751631725132, |
| "learning_rate": 4.297188755020081e-05, |
| "loss": 0.4798, |
| "step": 214 |
| }, |
| { |
| "epoch": 0.25934861278648974, |
| "grad_norm": 0.4429509321892677, |
| "learning_rate": 4.317269076305221e-05, |
| "loss": 0.5232, |
| "step": 215 |
| }, |
| { |
| "epoch": 0.2605548854041013, |
| "grad_norm": 0.5303855977581109, |
| "learning_rate": 4.337349397590362e-05, |
| "loss": 0.4884, |
| "step": 216 |
| }, |
| { |
| "epoch": 0.2617611580217129, |
| "grad_norm": 0.43125896662479185, |
| "learning_rate": 4.357429718875502e-05, |
| "loss": 0.4828, |
| "step": 217 |
| }, |
| { |
| "epoch": 0.2629674306393245, |
| "grad_norm": 0.438762700628616, |
| "learning_rate": 4.377510040160643e-05, |
| "loss": 0.4838, |
| "step": 218 |
| }, |
| { |
| "epoch": 0.26417370325693607, |
| "grad_norm": 0.4190780132169102, |
| "learning_rate": 4.3975903614457834e-05, |
| "loss": 0.4702, |
| "step": 219 |
| }, |
| { |
| "epoch": 0.26537997587454765, |
| "grad_norm": 0.3773440132348853, |
| "learning_rate": 4.417670682730924e-05, |
| "loss": 0.4942, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.26658624849215923, |
| "grad_norm": 0.47799490507514975, |
| "learning_rate": 4.4377510040160645e-05, |
| "loss": 0.4698, |
| "step": 221 |
| }, |
| { |
| "epoch": 0.2677925211097708, |
| "grad_norm": 0.37758247336865564, |
| "learning_rate": 4.457831325301205e-05, |
| "loss": 0.4638, |
| "step": 222 |
| }, |
| { |
| "epoch": 0.2689987937273824, |
| "grad_norm": 0.4474146827235853, |
| "learning_rate": 4.4779116465863456e-05, |
| "loss": 0.4852, |
| "step": 223 |
| }, |
| { |
| "epoch": 0.27020506634499397, |
| "grad_norm": 0.47716587130345856, |
| "learning_rate": 4.497991967871486e-05, |
| "loss": 0.4587, |
| "step": 224 |
| }, |
| { |
| "epoch": 0.27141133896260555, |
| "grad_norm": 0.5083388261649718, |
| "learning_rate": 4.5180722891566266e-05, |
| "loss": 0.4734, |
| "step": 225 |
| }, |
| { |
| "epoch": 0.27261761158021713, |
| "grad_norm": 0.46930543099228017, |
| "learning_rate": 4.538152610441767e-05, |
| "loss": 0.4749, |
| "step": 226 |
| }, |
| { |
| "epoch": 0.2738238841978287, |
| "grad_norm": 0.44360824898966233, |
| "learning_rate": 4.558232931726908e-05, |
| "loss": 0.488, |
| "step": 227 |
| }, |
| { |
| "epoch": 0.2750301568154403, |
| "grad_norm": 0.39902429944785056, |
| "learning_rate": 4.578313253012048e-05, |
| "loss": 0.4561, |
| "step": 228 |
| }, |
| { |
| "epoch": 0.2762364294330519, |
| "grad_norm": 0.4597398820977258, |
| "learning_rate": 4.598393574297189e-05, |
| "loss": 0.4593, |
| "step": 229 |
| }, |
| { |
| "epoch": 0.27744270205066346, |
| "grad_norm": 0.5413711861567382, |
| "learning_rate": 4.61847389558233e-05, |
| "loss": 0.4749, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.27864897466827504, |
| "grad_norm": 0.4631219743214568, |
| "learning_rate": 4.63855421686747e-05, |
| "loss": 0.4864, |
| "step": 231 |
| }, |
| { |
| "epoch": 0.2798552472858866, |
| "grad_norm": 0.4239404274562551, |
| "learning_rate": 4.658634538152611e-05, |
| "loss": 0.5028, |
| "step": 232 |
| }, |
| { |
| "epoch": 0.2810615199034982, |
| "grad_norm": 0.512533107003274, |
| "learning_rate": 4.678714859437751e-05, |
| "loss": 0.4529, |
| "step": 233 |
| }, |
| { |
| "epoch": 0.2822677925211098, |
| "grad_norm": 0.6384568999031756, |
| "learning_rate": 4.698795180722892e-05, |
| "loss": 0.4892, |
| "step": 234 |
| }, |
| { |
| "epoch": 0.28347406513872137, |
| "grad_norm": 0.7898314343362617, |
| "learning_rate": 4.718875502008032e-05, |
| "loss": 0.475, |
| "step": 235 |
| }, |
| { |
| "epoch": 0.28468033775633295, |
| "grad_norm": 0.5687679404684749, |
| "learning_rate": 4.738955823293173e-05, |
| "loss": 0.4751, |
| "step": 236 |
| }, |
| { |
| "epoch": 0.2858866103739445, |
| "grad_norm": 0.6318324876459296, |
| "learning_rate": 4.759036144578313e-05, |
| "loss": 0.4948, |
| "step": 237 |
| }, |
| { |
| "epoch": 0.2870928829915561, |
| "grad_norm": 0.6411579888771047, |
| "learning_rate": 4.779116465863454e-05, |
| "loss": 0.4862, |
| "step": 238 |
| }, |
| { |
| "epoch": 0.2882991556091677, |
| "grad_norm": 0.6548791719603922, |
| "learning_rate": 4.799196787148594e-05, |
| "loss": 0.4707, |
| "step": 239 |
| }, |
| { |
| "epoch": 0.28950542822677927, |
| "grad_norm": 0.48721678762737824, |
| "learning_rate": 4.8192771084337354e-05, |
| "loss": 0.4745, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.29071170084439085, |
| "grad_norm": 0.585841744027512, |
| "learning_rate": 4.839357429718876e-05, |
| "loss": 0.4771, |
| "step": 241 |
| }, |
| { |
| "epoch": 0.29191797346200243, |
| "grad_norm": 0.7343233388019949, |
| "learning_rate": 4.8594377510040165e-05, |
| "loss": 0.4586, |
| "step": 242 |
| }, |
| { |
| "epoch": 0.293124246079614, |
| "grad_norm": 0.7306993109383978, |
| "learning_rate": 4.879518072289157e-05, |
| "loss": 0.5018, |
| "step": 243 |
| }, |
| { |
| "epoch": 0.2943305186972256, |
| "grad_norm": 0.5752152211307707, |
| "learning_rate": 4.8995983935742975e-05, |
| "loss": 0.5027, |
| "step": 244 |
| }, |
| { |
| "epoch": 0.2955367913148372, |
| "grad_norm": 0.6954347040340547, |
| "learning_rate": 4.919678714859438e-05, |
| "loss": 0.4779, |
| "step": 245 |
| }, |
| { |
| "epoch": 0.29674306393244876, |
| "grad_norm": 0.9605672172965798, |
| "learning_rate": 4.9397590361445786e-05, |
| "loss": 0.495, |
| "step": 246 |
| }, |
| { |
| "epoch": 0.29794933655006034, |
| "grad_norm": 0.48111198217867024, |
| "learning_rate": 4.959839357429719e-05, |
| "loss": 0.4774, |
| "step": 247 |
| }, |
| { |
| "epoch": 0.2991556091676719, |
| "grad_norm": 0.6959734943206007, |
| "learning_rate": 4.97991967871486e-05, |
| "loss": 0.4748, |
| "step": 248 |
| }, |
| { |
| "epoch": 0.30036188178528345, |
| "grad_norm": 0.6307753559437208, |
| "learning_rate": 5e-05, |
| "loss": 0.4556, |
| "step": 249 |
| }, |
| { |
| "epoch": 0.30156815440289503, |
| "grad_norm": 0.5271890748977731, |
| "learning_rate": 4.997765862377123e-05, |
| "loss": 0.4959, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.3027744270205066, |
| "grad_norm": 0.4954669571709612, |
| "learning_rate": 4.995531724754245e-05, |
| "loss": 0.4636, |
| "step": 251 |
| }, |
| { |
| "epoch": 0.3039806996381182, |
| "grad_norm": 0.5362954459267386, |
| "learning_rate": 4.993297587131368e-05, |
| "loss": 0.4625, |
| "step": 252 |
| }, |
| { |
| "epoch": 0.30518697225572977, |
| "grad_norm": 0.5663830578871994, |
| "learning_rate": 4.99106344950849e-05, |
| "loss": 0.462, |
| "step": 253 |
| }, |
| { |
| "epoch": 0.30639324487334135, |
| "grad_norm": 0.5166937551194555, |
| "learning_rate": 4.988829311885612e-05, |
| "loss": 0.48, |
| "step": 254 |
| }, |
| { |
| "epoch": 0.30759951749095293, |
| "grad_norm": 0.45080582970761324, |
| "learning_rate": 4.986595174262735e-05, |
| "loss": 0.4765, |
| "step": 255 |
| }, |
| { |
| "epoch": 0.3088057901085645, |
| "grad_norm": 0.5488354412984369, |
| "learning_rate": 4.9843610366398574e-05, |
| "loss": 0.4824, |
| "step": 256 |
| }, |
| { |
| "epoch": 0.3100120627261761, |
| "grad_norm": 0.5669400193473989, |
| "learning_rate": 4.9821268990169795e-05, |
| "loss": 0.4503, |
| "step": 257 |
| }, |
| { |
| "epoch": 0.3112183353437877, |
| "grad_norm": 0.5115485943428845, |
| "learning_rate": 4.979892761394102e-05, |
| "loss": 0.4691, |
| "step": 258 |
| }, |
| { |
| "epoch": 0.31242460796139926, |
| "grad_norm": 0.5608966206639503, |
| "learning_rate": 4.977658623771225e-05, |
| "loss": 0.463, |
| "step": 259 |
| }, |
| { |
| "epoch": 0.31363088057901084, |
| "grad_norm": 0.4506008486154186, |
| "learning_rate": 4.975424486148347e-05, |
| "loss": 0.4882, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.3148371531966224, |
| "grad_norm": 0.6109184182493657, |
| "learning_rate": 4.973190348525469e-05, |
| "loss": 0.4814, |
| "step": 261 |
| }, |
| { |
| "epoch": 0.316043425814234, |
| "grad_norm": 0.44011566912817257, |
| "learning_rate": 4.970956210902592e-05, |
| "loss": 0.4702, |
| "step": 262 |
| }, |
| { |
| "epoch": 0.3172496984318456, |
| "grad_norm": 0.5082547699158838, |
| "learning_rate": 4.968722073279714e-05, |
| "loss": 0.4676, |
| "step": 263 |
| }, |
| { |
| "epoch": 0.31845597104945716, |
| "grad_norm": 0.4634121406839268, |
| "learning_rate": 4.966487935656837e-05, |
| "loss": 0.4814, |
| "step": 264 |
| }, |
| { |
| "epoch": 0.31966224366706875, |
| "grad_norm": 0.4613999245360435, |
| "learning_rate": 4.9642537980339595e-05, |
| "loss": 0.4892, |
| "step": 265 |
| }, |
| { |
| "epoch": 0.3208685162846803, |
| "grad_norm": 0.5407571756739682, |
| "learning_rate": 4.9620196604110815e-05, |
| "loss": 0.4765, |
| "step": 266 |
| }, |
| { |
| "epoch": 0.3220747889022919, |
| "grad_norm": 0.5588948249098938, |
| "learning_rate": 4.959785522788204e-05, |
| "loss": 0.4434, |
| "step": 267 |
| }, |
| { |
| "epoch": 0.3232810615199035, |
| "grad_norm": 0.48459990169582196, |
| "learning_rate": 4.9575513851653263e-05, |
| "loss": 0.482, |
| "step": 268 |
| }, |
| { |
| "epoch": 0.32448733413751507, |
| "grad_norm": 0.5203680763697215, |
| "learning_rate": 4.9553172475424484e-05, |
| "loss": 0.4561, |
| "step": 269 |
| }, |
| { |
| "epoch": 0.32569360675512665, |
| "grad_norm": 0.49517970790477517, |
| "learning_rate": 4.953083109919571e-05, |
| "loss": 0.4463, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.32689987937273823, |
| "grad_norm": 0.4496993668081543, |
| "learning_rate": 4.950848972296694e-05, |
| "loss": 0.4866, |
| "step": 271 |
| }, |
| { |
| "epoch": 0.3281061519903498, |
| "grad_norm": 0.5198314152754191, |
| "learning_rate": 4.948614834673816e-05, |
| "loss": 0.4736, |
| "step": 272 |
| }, |
| { |
| "epoch": 0.3293124246079614, |
| "grad_norm": 0.48165421751496224, |
| "learning_rate": 4.946380697050939e-05, |
| "loss": 0.4644, |
| "step": 273 |
| }, |
| { |
| "epoch": 0.330518697225573, |
| "grad_norm": 0.5017150374814877, |
| "learning_rate": 4.9441465594280615e-05, |
| "loss": 0.4714, |
| "step": 274 |
| }, |
| { |
| "epoch": 0.33172496984318456, |
| "grad_norm": 0.5125463646764701, |
| "learning_rate": 4.9419124218051835e-05, |
| "loss": 0.4783, |
| "step": 275 |
| }, |
| { |
| "epoch": 0.33293124246079614, |
| "grad_norm": 0.5118862848673149, |
| "learning_rate": 4.9396782841823056e-05, |
| "loss": 0.4506, |
| "step": 276 |
| }, |
| { |
| "epoch": 0.3341375150784077, |
| "grad_norm": 0.494848299345708, |
| "learning_rate": 4.9374441465594284e-05, |
| "loss": 0.4666, |
| "step": 277 |
| }, |
| { |
| "epoch": 0.3353437876960193, |
| "grad_norm": 0.574556205294029, |
| "learning_rate": 4.9352100089365504e-05, |
| "loss": 0.4715, |
| "step": 278 |
| }, |
| { |
| "epoch": 0.3365500603136309, |
| "grad_norm": 0.5013842930456912, |
| "learning_rate": 4.932975871313673e-05, |
| "loss": 0.4823, |
| "step": 279 |
| }, |
| { |
| "epoch": 0.33775633293124246, |
| "grad_norm": 0.575641680135763, |
| "learning_rate": 4.930741733690796e-05, |
| "loss": 0.4769, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.33896260554885405, |
| "grad_norm": 0.5073551828383931, |
| "learning_rate": 4.928507596067918e-05, |
| "loss": 0.4687, |
| "step": 281 |
| }, |
| { |
| "epoch": 0.3401688781664656, |
| "grad_norm": 0.6213137019127235, |
| "learning_rate": 4.92627345844504e-05, |
| "loss": 0.4912, |
| "step": 282 |
| }, |
| { |
| "epoch": 0.3413751507840772, |
| "grad_norm": 0.4650384893742426, |
| "learning_rate": 4.924039320822163e-05, |
| "loss": 0.4686, |
| "step": 283 |
| }, |
| { |
| "epoch": 0.3425814234016888, |
| "grad_norm": 0.411216907390994, |
| "learning_rate": 4.921805183199285e-05, |
| "loss": 0.4543, |
| "step": 284 |
| }, |
| { |
| "epoch": 0.34378769601930037, |
| "grad_norm": 0.462814550454254, |
| "learning_rate": 4.9195710455764076e-05, |
| "loss": 0.4691, |
| "step": 285 |
| }, |
| { |
| "epoch": 0.34499396863691195, |
| "grad_norm": 0.45055083119935785, |
| "learning_rate": 4.9173369079535304e-05, |
| "loss": 0.4648, |
| "step": 286 |
| }, |
| { |
| "epoch": 0.34620024125452353, |
| "grad_norm": 0.4070711196003684, |
| "learning_rate": 4.9151027703306525e-05, |
| "loss": 0.4558, |
| "step": 287 |
| }, |
| { |
| "epoch": 0.3474065138721351, |
| "grad_norm": 0.38539662658940144, |
| "learning_rate": 4.912868632707775e-05, |
| "loss": 0.4802, |
| "step": 288 |
| }, |
| { |
| "epoch": 0.3486127864897467, |
| "grad_norm": 0.5046587298061578, |
| "learning_rate": 4.910634495084897e-05, |
| "loss": 0.4901, |
| "step": 289 |
| }, |
| { |
| "epoch": 0.3498190591073583, |
| "grad_norm": 0.45014404701699506, |
| "learning_rate": 4.9084003574620193e-05, |
| "loss": 0.439, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.35102533172496986, |
| "grad_norm": 0.47834117803538234, |
| "learning_rate": 4.906166219839142e-05, |
| "loss": 0.4716, |
| "step": 291 |
| }, |
| { |
| "epoch": 0.35223160434258144, |
| "grad_norm": 0.6403594697917471, |
| "learning_rate": 4.903932082216265e-05, |
| "loss": 0.4449, |
| "step": 292 |
| }, |
| { |
| "epoch": 0.353437876960193, |
| "grad_norm": 0.5545894077440268, |
| "learning_rate": 4.901697944593387e-05, |
| "loss": 0.4991, |
| "step": 293 |
| }, |
| { |
| "epoch": 0.3546441495778046, |
| "grad_norm": 0.48207085217428985, |
| "learning_rate": 4.8994638069705097e-05, |
| "loss": 0.4424, |
| "step": 294 |
| }, |
| { |
| "epoch": 0.3558504221954162, |
| "grad_norm": 0.6531795184939722, |
| "learning_rate": 4.8972296693476324e-05, |
| "loss": 0.459, |
| "step": 295 |
| }, |
| { |
| "epoch": 0.35705669481302776, |
| "grad_norm": 0.658758404071505, |
| "learning_rate": 4.8949955317247545e-05, |
| "loss": 0.4812, |
| "step": 296 |
| }, |
| { |
| "epoch": 0.35826296743063935, |
| "grad_norm": 0.594767575961469, |
| "learning_rate": 4.8927613941018765e-05, |
| "loss": 0.4665, |
| "step": 297 |
| }, |
| { |
| "epoch": 0.3594692400482509, |
| "grad_norm": 0.5202270329599545, |
| "learning_rate": 4.890527256478999e-05, |
| "loss": 0.4507, |
| "step": 298 |
| }, |
| { |
| "epoch": 0.3606755126658625, |
| "grad_norm": 0.66410947267989, |
| "learning_rate": 4.8882931188561214e-05, |
| "loss": 0.4538, |
| "step": 299 |
| }, |
| { |
| "epoch": 0.3618817852834741, |
| "grad_norm": 0.688835455933309, |
| "learning_rate": 4.886058981233244e-05, |
| "loss": 0.4563, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.36308805790108567, |
| "grad_norm": 0.44276594605737246, |
| "learning_rate": 4.883824843610367e-05, |
| "loss": 0.4669, |
| "step": 301 |
| }, |
| { |
| "epoch": 0.36429433051869725, |
| "grad_norm": 0.6244042925092166, |
| "learning_rate": 4.881590705987489e-05, |
| "loss": 0.4597, |
| "step": 302 |
| }, |
| { |
| "epoch": 0.36550060313630883, |
| "grad_norm": 0.4282473469132016, |
| "learning_rate": 4.879356568364612e-05, |
| "loss": 0.4631, |
| "step": 303 |
| }, |
| { |
| "epoch": 0.36670687575392036, |
| "grad_norm": 0.49393564380494653, |
| "learning_rate": 4.877122430741734e-05, |
| "loss": 0.4622, |
| "step": 304 |
| }, |
| { |
| "epoch": 0.36791314837153194, |
| "grad_norm": 0.5754977153666446, |
| "learning_rate": 4.874888293118856e-05, |
| "loss": 0.4503, |
| "step": 305 |
| }, |
| { |
| "epoch": 0.3691194209891435, |
| "grad_norm": 0.43417824463420274, |
| "learning_rate": 4.8726541554959786e-05, |
| "loss": 0.4685, |
| "step": 306 |
| }, |
| { |
| "epoch": 0.3703256936067551, |
| "grad_norm": 0.5493384400309728, |
| "learning_rate": 4.870420017873101e-05, |
| "loss": 0.4484, |
| "step": 307 |
| }, |
| { |
| "epoch": 0.3715319662243667, |
| "grad_norm": 0.563798258109028, |
| "learning_rate": 4.8681858802502234e-05, |
| "loss": 0.4866, |
| "step": 308 |
| }, |
| { |
| "epoch": 0.37273823884197826, |
| "grad_norm": 0.40236230684499746, |
| "learning_rate": 4.865951742627346e-05, |
| "loss": 0.4426, |
| "step": 309 |
| }, |
| { |
| "epoch": 0.37394451145958985, |
| "grad_norm": 0.4610270253104838, |
| "learning_rate": 4.863717605004469e-05, |
| "loss": 0.4381, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.3751507840772014, |
| "grad_norm": 0.47654001944242114, |
| "learning_rate": 4.861483467381591e-05, |
| "loss": 0.4655, |
| "step": 311 |
| }, |
| { |
| "epoch": 0.376357056694813, |
| "grad_norm": 0.6151547050937779, |
| "learning_rate": 4.859249329758713e-05, |
| "loss": 0.472, |
| "step": 312 |
| }, |
| { |
| "epoch": 0.3775633293124246, |
| "grad_norm": 0.45232137136900996, |
| "learning_rate": 4.857015192135836e-05, |
| "loss": 0.46, |
| "step": 313 |
| }, |
| { |
| "epoch": 0.37876960193003617, |
| "grad_norm": 0.4506326312132236, |
| "learning_rate": 4.8547810545129585e-05, |
| "loss": 0.4573, |
| "step": 314 |
| }, |
| { |
| "epoch": 0.37997587454764775, |
| "grad_norm": 0.4983600614727651, |
| "learning_rate": 4.8525469168900806e-05, |
| "loss": 0.4748, |
| "step": 315 |
| }, |
| { |
| "epoch": 0.38118214716525933, |
| "grad_norm": 0.3879122279776259, |
| "learning_rate": 4.850312779267203e-05, |
| "loss": 0.4484, |
| "step": 316 |
| }, |
| { |
| "epoch": 0.3823884197828709, |
| "grad_norm": 0.4848285972697871, |
| "learning_rate": 4.848078641644326e-05, |
| "loss": 0.45, |
| "step": 317 |
| }, |
| { |
| "epoch": 0.3835946924004825, |
| "grad_norm": 0.4127959237985656, |
| "learning_rate": 4.845844504021448e-05, |
| "loss": 0.4388, |
| "step": 318 |
| }, |
| { |
| "epoch": 0.3848009650180941, |
| "grad_norm": 0.4474884654687781, |
| "learning_rate": 4.84361036639857e-05, |
| "loss": 0.5006, |
| "step": 319 |
| }, |
| { |
| "epoch": 0.38600723763570566, |
| "grad_norm": 0.4166281609224283, |
| "learning_rate": 4.841376228775693e-05, |
| "loss": 0.451, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.38721351025331724, |
| "grad_norm": 0.6167629036074882, |
| "learning_rate": 4.839142091152815e-05, |
| "loss": 0.4808, |
| "step": 321 |
| }, |
| { |
| "epoch": 0.3884197828709288, |
| "grad_norm": 0.46452397726219896, |
| "learning_rate": 4.836907953529938e-05, |
| "loss": 0.4519, |
| "step": 322 |
| }, |
| { |
| "epoch": 0.3896260554885404, |
| "grad_norm": 0.5077639670260757, |
| "learning_rate": 4.8346738159070605e-05, |
| "loss": 0.453, |
| "step": 323 |
| }, |
| { |
| "epoch": 0.390832328106152, |
| "grad_norm": 0.4492633198717073, |
| "learning_rate": 4.8324396782841826e-05, |
| "loss": 0.448, |
| "step": 324 |
| }, |
| { |
| "epoch": 0.39203860072376356, |
| "grad_norm": 0.5119837268347922, |
| "learning_rate": 4.8302055406613053e-05, |
| "loss": 0.4703, |
| "step": 325 |
| }, |
| { |
| "epoch": 0.39324487334137515, |
| "grad_norm": 0.46324744252555106, |
| "learning_rate": 4.8279714030384274e-05, |
| "loss": 0.4407, |
| "step": 326 |
| }, |
| { |
| "epoch": 0.3944511459589867, |
| "grad_norm": 0.4894638654542045, |
| "learning_rate": 4.8257372654155495e-05, |
| "loss": 0.4733, |
| "step": 327 |
| }, |
| { |
| "epoch": 0.3956574185765983, |
| "grad_norm": 0.46894954218311713, |
| "learning_rate": 4.823503127792672e-05, |
| "loss": 0.4617, |
| "step": 328 |
| }, |
| { |
| "epoch": 0.3968636911942099, |
| "grad_norm": 0.45217642275344966, |
| "learning_rate": 4.821268990169795e-05, |
| "loss": 0.464, |
| "step": 329 |
| }, |
| { |
| "epoch": 0.39806996381182147, |
| "grad_norm": 0.4961460092686353, |
| "learning_rate": 4.819034852546917e-05, |
| "loss": 0.461, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.39927623642943305, |
| "grad_norm": 0.46595217657545673, |
| "learning_rate": 4.81680071492404e-05, |
| "loss": 0.4704, |
| "step": 331 |
| }, |
| { |
| "epoch": 0.40048250904704463, |
| "grad_norm": 0.5559227542572431, |
| "learning_rate": 4.8145665773011625e-05, |
| "loss": 0.4903, |
| "step": 332 |
| }, |
| { |
| "epoch": 0.4016887816646562, |
| "grad_norm": 0.4948787429742588, |
| "learning_rate": 4.8123324396782846e-05, |
| "loss": 0.4715, |
| "step": 333 |
| }, |
| { |
| "epoch": 0.4028950542822678, |
| "grad_norm": 0.4970808248520605, |
| "learning_rate": 4.810098302055407e-05, |
| "loss": 0.4649, |
| "step": 334 |
| }, |
| { |
| "epoch": 0.4041013268998794, |
| "grad_norm": 0.4273747126680364, |
| "learning_rate": 4.8078641644325294e-05, |
| "loss": 0.4493, |
| "step": 335 |
| }, |
| { |
| "epoch": 0.40530759951749096, |
| "grad_norm": 0.50583259429466, |
| "learning_rate": 4.8056300268096515e-05, |
| "loss": 0.4623, |
| "step": 336 |
| }, |
| { |
| "epoch": 0.40651387213510254, |
| "grad_norm": 0.4297375922857019, |
| "learning_rate": 4.803395889186774e-05, |
| "loss": 0.4526, |
| "step": 337 |
| }, |
| { |
| "epoch": 0.4077201447527141, |
| "grad_norm": 0.35598323627722095, |
| "learning_rate": 4.801161751563897e-05, |
| "loss": 0.417, |
| "step": 338 |
| }, |
| { |
| "epoch": 0.4089264173703257, |
| "grad_norm": 0.39935840283688373, |
| "learning_rate": 4.798927613941019e-05, |
| "loss": 0.4326, |
| "step": 339 |
| }, |
| { |
| "epoch": 0.4101326899879373, |
| "grad_norm": 0.3622581622120651, |
| "learning_rate": 4.796693476318142e-05, |
| "loss": 0.4715, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.41133896260554886, |
| "grad_norm": 0.39214159703489104, |
| "learning_rate": 4.794459338695264e-05, |
| "loss": 0.4603, |
| "step": 341 |
| }, |
| { |
| "epoch": 0.41254523522316044, |
| "grad_norm": 0.386458432467993, |
| "learning_rate": 4.792225201072386e-05, |
| "loss": 0.4555, |
| "step": 342 |
| }, |
| { |
| "epoch": 0.413751507840772, |
| "grad_norm": 0.41568373206095827, |
| "learning_rate": 4.789991063449509e-05, |
| "loss": 0.4512, |
| "step": 343 |
| }, |
| { |
| "epoch": 0.4149577804583836, |
| "grad_norm": 0.360509732388755, |
| "learning_rate": 4.7877569258266315e-05, |
| "loss": 0.469, |
| "step": 344 |
| }, |
| { |
| "epoch": 0.4161640530759952, |
| "grad_norm": 0.39515906701596504, |
| "learning_rate": 4.7855227882037535e-05, |
| "loss": 0.443, |
| "step": 345 |
| }, |
| { |
| "epoch": 0.41737032569360677, |
| "grad_norm": 0.34843667675457657, |
| "learning_rate": 4.783288650580876e-05, |
| "loss": 0.4512, |
| "step": 346 |
| }, |
| { |
| "epoch": 0.41857659831121835, |
| "grad_norm": 0.4195610422690541, |
| "learning_rate": 4.781054512957998e-05, |
| "loss": 0.457, |
| "step": 347 |
| }, |
| { |
| "epoch": 0.41978287092882993, |
| "grad_norm": 0.37324898276685664, |
| "learning_rate": 4.7788203753351204e-05, |
| "loss": 0.4547, |
| "step": 348 |
| }, |
| { |
| "epoch": 0.4209891435464415, |
| "grad_norm": 0.5536452542682623, |
| "learning_rate": 4.776586237712243e-05, |
| "loss": 0.467, |
| "step": 349 |
| }, |
| { |
| "epoch": 0.4221954161640531, |
| "grad_norm": 0.39285135804997573, |
| "learning_rate": 4.774352100089366e-05, |
| "loss": 0.4462, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.4234016887816647, |
| "grad_norm": 0.48757905541108004, |
| "learning_rate": 4.772117962466488e-05, |
| "loss": 0.4537, |
| "step": 351 |
| }, |
| { |
| "epoch": 0.42460796139927626, |
| "grad_norm": 0.3968507691375903, |
| "learning_rate": 4.769883824843611e-05, |
| "loss": 0.4656, |
| "step": 352 |
| }, |
| { |
| "epoch": 0.42581423401688784, |
| "grad_norm": 0.521932683169365, |
| "learning_rate": 4.7676496872207335e-05, |
| "loss": 0.4346, |
| "step": 353 |
| }, |
| { |
| "epoch": 0.4270205066344994, |
| "grad_norm": 0.47502250284227127, |
| "learning_rate": 4.7654155495978555e-05, |
| "loss": 0.4586, |
| "step": 354 |
| }, |
| { |
| "epoch": 0.428226779252111, |
| "grad_norm": 0.4132516632500004, |
| "learning_rate": 4.7631814119749776e-05, |
| "loss": 0.4771, |
| "step": 355 |
| }, |
| { |
| "epoch": 0.4294330518697226, |
| "grad_norm": 0.4956716038670087, |
| "learning_rate": 4.7609472743521004e-05, |
| "loss": 0.5001, |
| "step": 356 |
| }, |
| { |
| "epoch": 0.43063932448733416, |
| "grad_norm": 0.43933494549593005, |
| "learning_rate": 4.7587131367292224e-05, |
| "loss": 0.4507, |
| "step": 357 |
| }, |
| { |
| "epoch": 0.43184559710494574, |
| "grad_norm": 0.4535428967390594, |
| "learning_rate": 4.756478999106345e-05, |
| "loss": 0.4487, |
| "step": 358 |
| }, |
| { |
| "epoch": 0.4330518697225573, |
| "grad_norm": 0.43344450609824314, |
| "learning_rate": 4.754244861483468e-05, |
| "loss": 0.4434, |
| "step": 359 |
| }, |
| { |
| "epoch": 0.43425814234016885, |
| "grad_norm": 0.39608164974537874, |
| "learning_rate": 4.75201072386059e-05, |
| "loss": 0.4463, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.43546441495778043, |
| "grad_norm": 0.4534682933209474, |
| "learning_rate": 4.749776586237713e-05, |
| "loss": 0.4704, |
| "step": 361 |
| }, |
| { |
| "epoch": 0.436670687575392, |
| "grad_norm": 0.40734523179872223, |
| "learning_rate": 4.747542448614835e-05, |
| "loss": 0.473, |
| "step": 362 |
| }, |
| { |
| "epoch": 0.4378769601930036, |
| "grad_norm": 0.41613899892629674, |
| "learning_rate": 4.745308310991957e-05, |
| "loss": 0.4487, |
| "step": 363 |
| }, |
| { |
| "epoch": 0.4390832328106152, |
| "grad_norm": 0.4705681098407308, |
| "learning_rate": 4.7430741733690796e-05, |
| "loss": 0.4487, |
| "step": 364 |
| }, |
| { |
| "epoch": 0.44028950542822676, |
| "grad_norm": 0.44175385002893386, |
| "learning_rate": 4.7408400357462024e-05, |
| "loss": 0.4412, |
| "step": 365 |
| }, |
| { |
| "epoch": 0.44149577804583834, |
| "grad_norm": 0.40907300632327315, |
| "learning_rate": 4.7386058981233244e-05, |
| "loss": 0.4434, |
| "step": 366 |
| }, |
| { |
| "epoch": 0.4427020506634499, |
| "grad_norm": 0.4293644058129011, |
| "learning_rate": 4.736371760500447e-05, |
| "loss": 0.4541, |
| "step": 367 |
| }, |
| { |
| "epoch": 0.4439083232810615, |
| "grad_norm": 0.4311778069623195, |
| "learning_rate": 4.73413762287757e-05, |
| "loss": 0.4849, |
| "step": 368 |
| }, |
| { |
| "epoch": 0.4451145958986731, |
| "grad_norm": 0.39533927643970806, |
| "learning_rate": 4.731903485254692e-05, |
| "loss": 0.4711, |
| "step": 369 |
| }, |
| { |
| "epoch": 0.44632086851628466, |
| "grad_norm": 0.43025595785491744, |
| "learning_rate": 4.729669347631814e-05, |
| "loss": 0.4343, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.44752714113389624, |
| "grad_norm": 0.4289663869921225, |
| "learning_rate": 4.727435210008937e-05, |
| "loss": 0.4721, |
| "step": 371 |
| }, |
| { |
| "epoch": 0.4487334137515078, |
| "grad_norm": 0.39948007331512414, |
| "learning_rate": 4.725201072386059e-05, |
| "loss": 0.4332, |
| "step": 372 |
| }, |
| { |
| "epoch": 0.4499396863691194, |
| "grad_norm": 0.4386452400368104, |
| "learning_rate": 4.7229669347631816e-05, |
| "loss": 0.4389, |
| "step": 373 |
| }, |
| { |
| "epoch": 0.451145958986731, |
| "grad_norm": 0.4262661814196975, |
| "learning_rate": 4.7207327971403044e-05, |
| "loss": 0.4458, |
| "step": 374 |
| }, |
| { |
| "epoch": 0.45235223160434257, |
| "grad_norm": 0.37931784449502054, |
| "learning_rate": 4.7184986595174265e-05, |
| "loss": 0.4495, |
| "step": 375 |
| }, |
| { |
| "epoch": 0.45355850422195415, |
| "grad_norm": 0.4524128475128064, |
| "learning_rate": 4.716264521894549e-05, |
| "loss": 0.4455, |
| "step": 376 |
| }, |
| { |
| "epoch": 0.45476477683956573, |
| "grad_norm": 0.3838605212579755, |
| "learning_rate": 4.714030384271671e-05, |
| "loss": 0.4432, |
| "step": 377 |
| }, |
| { |
| "epoch": 0.4559710494571773, |
| "grad_norm": 0.45065400120393345, |
| "learning_rate": 4.7117962466487934e-05, |
| "loss": 0.4746, |
| "step": 378 |
| }, |
| { |
| "epoch": 0.4571773220747889, |
| "grad_norm": 0.41737828415366907, |
| "learning_rate": 4.709562109025916e-05, |
| "loss": 0.4554, |
| "step": 379 |
| }, |
| { |
| "epoch": 0.4583835946924005, |
| "grad_norm": 0.4654886210274436, |
| "learning_rate": 4.707327971403039e-05, |
| "loss": 0.4628, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.45958986731001206, |
| "grad_norm": 0.46471916366661375, |
| "learning_rate": 4.705093833780161e-05, |
| "loss": 0.4705, |
| "step": 381 |
| }, |
| { |
| "epoch": 0.46079613992762364, |
| "grad_norm": 0.4909372293107839, |
| "learning_rate": 4.702859696157284e-05, |
| "loss": 0.4428, |
| "step": 382 |
| }, |
| { |
| "epoch": 0.4620024125452352, |
| "grad_norm": 0.4809948968908599, |
| "learning_rate": 4.7006255585344064e-05, |
| "loss": 0.4729, |
| "step": 383 |
| }, |
| { |
| "epoch": 0.4632086851628468, |
| "grad_norm": 0.395516138642914, |
| "learning_rate": 4.6983914209115285e-05, |
| "loss": 0.453, |
| "step": 384 |
| }, |
| { |
| "epoch": 0.4644149577804584, |
| "grad_norm": 0.5130350022661608, |
| "learning_rate": 4.6961572832886506e-05, |
| "loss": 0.4491, |
| "step": 385 |
| }, |
| { |
| "epoch": 0.46562123039806996, |
| "grad_norm": 0.4210327177360951, |
| "learning_rate": 4.693923145665773e-05, |
| "loss": 0.4394, |
| "step": 386 |
| }, |
| { |
| "epoch": 0.46682750301568154, |
| "grad_norm": 0.43805602941227245, |
| "learning_rate": 4.6916890080428954e-05, |
| "loss": 0.4498, |
| "step": 387 |
| }, |
| { |
| "epoch": 0.4680337756332931, |
| "grad_norm": 0.4374100581941471, |
| "learning_rate": 4.689454870420018e-05, |
| "loss": 0.4429, |
| "step": 388 |
| }, |
| { |
| "epoch": 0.4692400482509047, |
| "grad_norm": 0.42690169548391277, |
| "learning_rate": 4.687220732797141e-05, |
| "loss": 0.4486, |
| "step": 389 |
| }, |
| { |
| "epoch": 0.4704463208685163, |
| "grad_norm": 0.566544160347167, |
| "learning_rate": 4.684986595174263e-05, |
| "loss": 0.4351, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.47165259348612787, |
| "grad_norm": 0.5463676682693923, |
| "learning_rate": 4.682752457551386e-05, |
| "loss": 0.4475, |
| "step": 391 |
| }, |
| { |
| "epoch": 0.47285886610373945, |
| "grad_norm": 0.5744958747454104, |
| "learning_rate": 4.680518319928508e-05, |
| "loss": 0.4402, |
| "step": 392 |
| }, |
| { |
| "epoch": 0.47406513872135103, |
| "grad_norm": 0.35516581962807914, |
| "learning_rate": 4.67828418230563e-05, |
| "loss": 0.4536, |
| "step": 393 |
| }, |
| { |
| "epoch": 0.4752714113389626, |
| "grad_norm": 0.5946852579555668, |
| "learning_rate": 4.6760500446827526e-05, |
| "loss": 0.4592, |
| "step": 394 |
| }, |
| { |
| "epoch": 0.4764776839565742, |
| "grad_norm": 0.47812875942111466, |
| "learning_rate": 4.673815907059875e-05, |
| "loss": 0.4491, |
| "step": 395 |
| }, |
| { |
| "epoch": 0.4776839565741858, |
| "grad_norm": 0.44836535011830225, |
| "learning_rate": 4.6715817694369974e-05, |
| "loss": 0.456, |
| "step": 396 |
| }, |
| { |
| "epoch": 0.47889022919179736, |
| "grad_norm": 0.5656700294363519, |
| "learning_rate": 4.66934763181412e-05, |
| "loss": 0.4697, |
| "step": 397 |
| }, |
| { |
| "epoch": 0.48009650180940894, |
| "grad_norm": 0.4300488393072486, |
| "learning_rate": 4.667113494191243e-05, |
| "loss": 0.4495, |
| "step": 398 |
| }, |
| { |
| "epoch": 0.4813027744270205, |
| "grad_norm": 0.5038511492658745, |
| "learning_rate": 4.664879356568364e-05, |
| "loss": 0.4619, |
| "step": 399 |
| }, |
| { |
| "epoch": 0.4825090470446321, |
| "grad_norm": 0.3821023359414941, |
| "learning_rate": 4.662645218945487e-05, |
| "loss": 0.4335, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.4837153196622437, |
| "grad_norm": 0.45116254617278473, |
| "learning_rate": 4.66041108132261e-05, |
| "loss": 0.4664, |
| "step": 401 |
| }, |
| { |
| "epoch": 0.48492159227985526, |
| "grad_norm": 0.4695210682061351, |
| "learning_rate": 4.658176943699732e-05, |
| "loss": 0.4458, |
| "step": 402 |
| }, |
| { |
| "epoch": 0.48612786489746684, |
| "grad_norm": 0.37016930218926347, |
| "learning_rate": 4.6559428060768546e-05, |
| "loss": 0.4485, |
| "step": 403 |
| }, |
| { |
| "epoch": 0.4873341375150784, |
| "grad_norm": 0.48562834405762084, |
| "learning_rate": 4.653708668453977e-05, |
| "loss": 0.4523, |
| "step": 404 |
| }, |
| { |
| "epoch": 0.48854041013269, |
| "grad_norm": 0.42419760007250784, |
| "learning_rate": 4.6514745308310994e-05, |
| "loss": 0.4512, |
| "step": 405 |
| }, |
| { |
| "epoch": 0.4897466827503016, |
| "grad_norm": 0.5247057241517089, |
| "learning_rate": 4.6492403932082215e-05, |
| "loss": 0.4755, |
| "step": 406 |
| }, |
| { |
| "epoch": 0.49095295536791317, |
| "grad_norm": 0.4906512258804201, |
| "learning_rate": 4.647006255585344e-05, |
| "loss": 0.4752, |
| "step": 407 |
| }, |
| { |
| "epoch": 0.49215922798552475, |
| "grad_norm": 0.376373380011517, |
| "learning_rate": 4.644772117962466e-05, |
| "loss": 0.431, |
| "step": 408 |
| }, |
| { |
| "epoch": 0.49336550060313633, |
| "grad_norm": 0.41098879886311784, |
| "learning_rate": 4.642537980339589e-05, |
| "loss": 0.445, |
| "step": 409 |
| }, |
| { |
| "epoch": 0.4945717732207479, |
| "grad_norm": 0.45803591010235595, |
| "learning_rate": 4.640303842716712e-05, |
| "loss": 0.4729, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.4957780458383595, |
| "grad_norm": 0.41767185914642463, |
| "learning_rate": 4.638069705093834e-05, |
| "loss": 0.4545, |
| "step": 411 |
| }, |
| { |
| "epoch": 0.4969843184559711, |
| "grad_norm": 0.38004435364145217, |
| "learning_rate": 4.6358355674709566e-05, |
| "loss": 0.4604, |
| "step": 412 |
| }, |
| { |
| "epoch": 0.49819059107358266, |
| "grad_norm": 0.46482447063204524, |
| "learning_rate": 4.633601429848079e-05, |
| "loss": 0.4256, |
| "step": 413 |
| }, |
| { |
| "epoch": 0.49939686369119424, |
| "grad_norm": 0.35230303342753366, |
| "learning_rate": 4.631367292225201e-05, |
| "loss": 0.4576, |
| "step": 414 |
| }, |
| { |
| "epoch": 0.5006031363088058, |
| "grad_norm": 0.466911404514305, |
| "learning_rate": 4.6291331546023235e-05, |
| "loss": 0.4301, |
| "step": 415 |
| }, |
| { |
| "epoch": 0.5018094089264173, |
| "grad_norm": 0.44691606084067054, |
| "learning_rate": 4.626899016979446e-05, |
| "loss": 0.4506, |
| "step": 416 |
| }, |
| { |
| "epoch": 0.503015681544029, |
| "grad_norm": 0.39651239023018436, |
| "learning_rate": 4.624664879356568e-05, |
| "loss": 0.4423, |
| "step": 417 |
| }, |
| { |
| "epoch": 0.5042219541616405, |
| "grad_norm": 0.478399072958898, |
| "learning_rate": 4.622430741733691e-05, |
| "loss": 0.4547, |
| "step": 418 |
| }, |
| { |
| "epoch": 0.5054282267792521, |
| "grad_norm": 0.4641367903464798, |
| "learning_rate": 4.620196604110814e-05, |
| "loss": 0.4485, |
| "step": 419 |
| }, |
| { |
| "epoch": 0.5066344993968637, |
| "grad_norm": 0.3978320017768976, |
| "learning_rate": 4.617962466487936e-05, |
| "loss": 0.4495, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.5078407720144753, |
| "grad_norm": 0.4867811541317465, |
| "learning_rate": 4.615728328865058e-05, |
| "loss": 0.4823, |
| "step": 421 |
| }, |
| { |
| "epoch": 0.5090470446320868, |
| "grad_norm": 0.37579542830012624, |
| "learning_rate": 4.613494191242181e-05, |
| "loss": 0.4562, |
| "step": 422 |
| }, |
| { |
| "epoch": 0.5102533172496985, |
| "grad_norm": 0.3933322015028353, |
| "learning_rate": 4.611260053619303e-05, |
| "loss": 0.4697, |
| "step": 423 |
| }, |
| { |
| "epoch": 0.51145958986731, |
| "grad_norm": 0.5140813767762246, |
| "learning_rate": 4.6090259159964255e-05, |
| "loss": 0.4539, |
| "step": 424 |
| }, |
| { |
| "epoch": 0.5126658624849216, |
| "grad_norm": 0.4293323108323189, |
| "learning_rate": 4.606791778373548e-05, |
| "loss": 0.4688, |
| "step": 425 |
| }, |
| { |
| "epoch": 0.5138721351025332, |
| "grad_norm": 0.4037976218720339, |
| "learning_rate": 4.60455764075067e-05, |
| "loss": 0.433, |
| "step": 426 |
| }, |
| { |
| "epoch": 0.5150784077201448, |
| "grad_norm": 0.49210382273054565, |
| "learning_rate": 4.602323503127793e-05, |
| "loss": 0.4721, |
| "step": 427 |
| }, |
| { |
| "epoch": 0.5162846803377563, |
| "grad_norm": 0.45360571122605636, |
| "learning_rate": 4.600089365504915e-05, |
| "loss": 0.4417, |
| "step": 428 |
| }, |
| { |
| "epoch": 0.517490952955368, |
| "grad_norm": 0.43167468125357994, |
| "learning_rate": 4.597855227882037e-05, |
| "loss": 0.4431, |
| "step": 429 |
| }, |
| { |
| "epoch": 0.5186972255729795, |
| "grad_norm": 0.4333047936227905, |
| "learning_rate": 4.59562109025916e-05, |
| "loss": 0.4513, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.5199034981905911, |
| "grad_norm": 0.37541607482723177, |
| "learning_rate": 4.593386952636283e-05, |
| "loss": 0.4379, |
| "step": 431 |
| }, |
| { |
| "epoch": 0.5211097708082026, |
| "grad_norm": 0.43350699006432963, |
| "learning_rate": 4.591152815013405e-05, |
| "loss": 0.4465, |
| "step": 432 |
| }, |
| { |
| "epoch": 0.5223160434258143, |
| "grad_norm": 0.3396586628116001, |
| "learning_rate": 4.5889186773905275e-05, |
| "loss": 0.4658, |
| "step": 433 |
| }, |
| { |
| "epoch": 0.5235223160434258, |
| "grad_norm": 0.39404427630025723, |
| "learning_rate": 4.58668453976765e-05, |
| "loss": 0.4665, |
| "step": 434 |
| }, |
| { |
| "epoch": 0.5247285886610374, |
| "grad_norm": 0.66647772071702, |
| "learning_rate": 4.5844504021447723e-05, |
| "loss": 0.476, |
| "step": 435 |
| }, |
| { |
| "epoch": 0.525934861278649, |
| "grad_norm": 0.41590803227411977, |
| "learning_rate": 4.5822162645218944e-05, |
| "loss": 0.4628, |
| "step": 436 |
| }, |
| { |
| "epoch": 0.5271411338962606, |
| "grad_norm": 0.3535047122980892, |
| "learning_rate": 4.579982126899017e-05, |
| "loss": 0.463, |
| "step": 437 |
| }, |
| { |
| "epoch": 0.5283474065138721, |
| "grad_norm": 0.3984301763075602, |
| "learning_rate": 4.57774798927614e-05, |
| "loss": 0.4477, |
| "step": 438 |
| }, |
| { |
| "epoch": 0.5295536791314838, |
| "grad_norm": 0.41893046808056794, |
| "learning_rate": 4.575513851653262e-05, |
| "loss": 0.4408, |
| "step": 439 |
| }, |
| { |
| "epoch": 0.5307599517490953, |
| "grad_norm": 0.3462986350174594, |
| "learning_rate": 4.573279714030385e-05, |
| "loss": 0.4242, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.5319662243667069, |
| "grad_norm": 0.46322150308522203, |
| "learning_rate": 4.5710455764075075e-05, |
| "loss": 0.449, |
| "step": 441 |
| }, |
| { |
| "epoch": 0.5331724969843185, |
| "grad_norm": 0.5067918359886772, |
| "learning_rate": 4.5688114387846295e-05, |
| "loss": 0.4825, |
| "step": 442 |
| }, |
| { |
| "epoch": 0.53437876960193, |
| "grad_norm": 0.45997864303535113, |
| "learning_rate": 4.5665773011617516e-05, |
| "loss": 0.4324, |
| "step": 443 |
| }, |
| { |
| "epoch": 0.5355850422195416, |
| "grad_norm": 0.5398561931583092, |
| "learning_rate": 4.5643431635388744e-05, |
| "loss": 0.4689, |
| "step": 444 |
| }, |
| { |
| "epoch": 0.5367913148371531, |
| "grad_norm": 0.3868465771958736, |
| "learning_rate": 4.5621090259159964e-05, |
| "loss": 0.4572, |
| "step": 445 |
| }, |
| { |
| "epoch": 0.5379975874547648, |
| "grad_norm": 0.4419588081800375, |
| "learning_rate": 4.559874888293119e-05, |
| "loss": 0.4345, |
| "step": 446 |
| }, |
| { |
| "epoch": 0.5392038600723763, |
| "grad_norm": 0.3578794337535823, |
| "learning_rate": 4.557640750670242e-05, |
| "loss": 0.4445, |
| "step": 447 |
| }, |
| { |
| "epoch": 0.5404101326899879, |
| "grad_norm": 0.47193397921160224, |
| "learning_rate": 4.555406613047364e-05, |
| "loss": 0.4675, |
| "step": 448 |
| }, |
| { |
| "epoch": 0.5416164053075995, |
| "grad_norm": 0.36285549390237526, |
| "learning_rate": 4.553172475424487e-05, |
| "loss": 0.4495, |
| "step": 449 |
| }, |
| { |
| "epoch": 0.5428226779252111, |
| "grad_norm": 0.3989213205809804, |
| "learning_rate": 4.550938337801609e-05, |
| "loss": 0.4361, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.5440289505428226, |
| "grad_norm": 0.3664013991140493, |
| "learning_rate": 4.548704200178731e-05, |
| "loss": 0.4648, |
| "step": 451 |
| }, |
| { |
| "epoch": 0.5452352231604343, |
| "grad_norm": 0.3882957125804488, |
| "learning_rate": 4.5464700625558536e-05, |
| "loss": 0.4432, |
| "step": 452 |
| }, |
| { |
| "epoch": 0.5464414957780458, |
| "grad_norm": 0.41892351772052466, |
| "learning_rate": 4.5442359249329764e-05, |
| "loss": 0.4371, |
| "step": 453 |
| }, |
| { |
| "epoch": 0.5476477683956574, |
| "grad_norm": 0.39080990222858825, |
| "learning_rate": 4.5420017873100985e-05, |
| "loss": 0.4366, |
| "step": 454 |
| }, |
| { |
| "epoch": 0.548854041013269, |
| "grad_norm": 0.41778012912126616, |
| "learning_rate": 4.539767649687221e-05, |
| "loss": 0.4551, |
| "step": 455 |
| }, |
| { |
| "epoch": 0.5500603136308806, |
| "grad_norm": 0.36612593843186386, |
| "learning_rate": 4.537533512064344e-05, |
| "loss": 0.4333, |
| "step": 456 |
| }, |
| { |
| "epoch": 0.5512665862484921, |
| "grad_norm": 0.40433234185282513, |
| "learning_rate": 4.535299374441466e-05, |
| "loss": 0.4409, |
| "step": 457 |
| }, |
| { |
| "epoch": 0.5524728588661038, |
| "grad_norm": 0.7444650694896087, |
| "learning_rate": 4.533065236818588e-05, |
| "loss": 0.4561, |
| "step": 458 |
| }, |
| { |
| "epoch": 0.5536791314837153, |
| "grad_norm": 0.4116764564963248, |
| "learning_rate": 4.530831099195711e-05, |
| "loss": 0.4389, |
| "step": 459 |
| }, |
| { |
| "epoch": 0.5548854041013269, |
| "grad_norm": 0.38464112606556955, |
| "learning_rate": 4.528596961572833e-05, |
| "loss": 0.4512, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.5560916767189384, |
| "grad_norm": 0.39310293733158486, |
| "learning_rate": 4.5263628239499557e-05, |
| "loss": 0.4466, |
| "step": 461 |
| }, |
| { |
| "epoch": 0.5572979493365501, |
| "grad_norm": 0.41483308376955597, |
| "learning_rate": 4.5241286863270784e-05, |
| "loss": 0.4452, |
| "step": 462 |
| }, |
| { |
| "epoch": 0.5585042219541616, |
| "grad_norm": 0.3809153812654458, |
| "learning_rate": 4.5218945487042005e-05, |
| "loss": 0.4531, |
| "step": 463 |
| }, |
| { |
| "epoch": 0.5597104945717732, |
| "grad_norm": 0.39580214313931394, |
| "learning_rate": 4.5196604110813225e-05, |
| "loss": 0.4635, |
| "step": 464 |
| }, |
| { |
| "epoch": 0.5609167671893848, |
| "grad_norm": 0.36413357758862114, |
| "learning_rate": 4.517426273458445e-05, |
| "loss": 0.4472, |
| "step": 465 |
| }, |
| { |
| "epoch": 0.5621230398069964, |
| "grad_norm": 0.3308446731299311, |
| "learning_rate": 4.5151921358355674e-05, |
| "loss": 0.425, |
| "step": 466 |
| }, |
| { |
| "epoch": 0.5633293124246079, |
| "grad_norm": 0.3281583052827164, |
| "learning_rate": 4.51295799821269e-05, |
| "loss": 0.43, |
| "step": 467 |
| }, |
| { |
| "epoch": 0.5645355850422196, |
| "grad_norm": 0.40183980071492237, |
| "learning_rate": 4.510723860589813e-05, |
| "loss": 0.4579, |
| "step": 468 |
| }, |
| { |
| "epoch": 0.5657418576598311, |
| "grad_norm": 0.42058205952983124, |
| "learning_rate": 4.508489722966935e-05, |
| "loss": 0.4631, |
| "step": 469 |
| }, |
| { |
| "epoch": 0.5669481302774427, |
| "grad_norm": 0.34646556614800056, |
| "learning_rate": 4.506255585344058e-05, |
| "loss": 0.4694, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.5681544028950543, |
| "grad_norm": 0.45794848334831695, |
| "learning_rate": 4.50402144772118e-05, |
| "loss": 0.4425, |
| "step": 471 |
| }, |
| { |
| "epoch": 0.5693606755126659, |
| "grad_norm": 0.34739688810650715, |
| "learning_rate": 4.501787310098302e-05, |
| "loss": 0.4496, |
| "step": 472 |
| }, |
| { |
| "epoch": 0.5705669481302774, |
| "grad_norm": 0.5010209658550312, |
| "learning_rate": 4.4995531724754246e-05, |
| "loss": 0.4446, |
| "step": 473 |
| }, |
| { |
| "epoch": 0.571773220747889, |
| "grad_norm": 0.5164932286032147, |
| "learning_rate": 4.497319034852547e-05, |
| "loss": 0.4275, |
| "step": 474 |
| }, |
| { |
| "epoch": 0.5729794933655006, |
| "grad_norm": 0.3736542241126828, |
| "learning_rate": 4.4950848972296694e-05, |
| "loss": 0.4285, |
| "step": 475 |
| }, |
| { |
| "epoch": 0.5741857659831122, |
| "grad_norm": 0.4110262572490399, |
| "learning_rate": 4.492850759606792e-05, |
| "loss": 0.4561, |
| "step": 476 |
| }, |
| { |
| "epoch": 0.5753920386007237, |
| "grad_norm": 1.13426364723185, |
| "learning_rate": 4.490616621983915e-05, |
| "loss": 0.436, |
| "step": 477 |
| }, |
| { |
| "epoch": 0.5765983112183354, |
| "grad_norm": 0.3844019173292291, |
| "learning_rate": 4.488382484361037e-05, |
| "loss": 0.4502, |
| "step": 478 |
| }, |
| { |
| "epoch": 0.5778045838359469, |
| "grad_norm": 0.43306080304122824, |
| "learning_rate": 4.486148346738159e-05, |
| "loss": 0.4297, |
| "step": 479 |
| }, |
| { |
| "epoch": 0.5790108564535585, |
| "grad_norm": 0.3597160387122504, |
| "learning_rate": 4.483914209115282e-05, |
| "loss": 0.445, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.5802171290711701, |
| "grad_norm": 0.3756419648637461, |
| "learning_rate": 4.481680071492404e-05, |
| "loss": 0.4273, |
| "step": 481 |
| }, |
| { |
| "epoch": 0.5814234016887817, |
| "grad_norm": 0.37035532088343315, |
| "learning_rate": 4.4794459338695266e-05, |
| "loss": 0.4654, |
| "step": 482 |
| }, |
| { |
| "epoch": 0.5826296743063932, |
| "grad_norm": 0.3807674777156649, |
| "learning_rate": 4.477211796246649e-05, |
| "loss": 0.4123, |
| "step": 483 |
| }, |
| { |
| "epoch": 0.5838359469240049, |
| "grad_norm": 0.4624416203917337, |
| "learning_rate": 4.4749776586237714e-05, |
| "loss": 0.4483, |
| "step": 484 |
| }, |
| { |
| "epoch": 0.5850422195416164, |
| "grad_norm": 0.40382677830929836, |
| "learning_rate": 4.472743521000894e-05, |
| "loss": 0.451, |
| "step": 485 |
| }, |
| { |
| "epoch": 0.586248492159228, |
| "grad_norm": 0.44548308481777593, |
| "learning_rate": 4.470509383378016e-05, |
| "loss": 0.4421, |
| "step": 486 |
| }, |
| { |
| "epoch": 0.5874547647768396, |
| "grad_norm": 0.38837454887772743, |
| "learning_rate": 4.468275245755138e-05, |
| "loss": 0.4473, |
| "step": 487 |
| }, |
| { |
| "epoch": 0.5886610373944512, |
| "grad_norm": 0.5034381554902535, |
| "learning_rate": 4.466041108132261e-05, |
| "loss": 0.4601, |
| "step": 488 |
| }, |
| { |
| "epoch": 0.5898673100120627, |
| "grad_norm": 0.5549913556398447, |
| "learning_rate": 4.463806970509384e-05, |
| "loss": 0.4501, |
| "step": 489 |
| }, |
| { |
| "epoch": 0.5910735826296744, |
| "grad_norm": 0.46407716031475194, |
| "learning_rate": 4.461572832886506e-05, |
| "loss": 0.4204, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.5922798552472859, |
| "grad_norm": 0.4213356267800612, |
| "learning_rate": 4.4593386952636286e-05, |
| "loss": 0.4049, |
| "step": 491 |
| }, |
| { |
| "epoch": 0.5934861278648975, |
| "grad_norm": 0.5095914474336406, |
| "learning_rate": 4.4571045576407513e-05, |
| "loss": 0.4342, |
| "step": 492 |
| }, |
| { |
| "epoch": 0.594692400482509, |
| "grad_norm": 0.41864465345580903, |
| "learning_rate": 4.4548704200178734e-05, |
| "loss": 0.4376, |
| "step": 493 |
| }, |
| { |
| "epoch": 0.5958986731001207, |
| "grad_norm": 0.5958745960318322, |
| "learning_rate": 4.4526362823949955e-05, |
| "loss": 0.4447, |
| "step": 494 |
| }, |
| { |
| "epoch": 0.5971049457177322, |
| "grad_norm": 0.49437930231053906, |
| "learning_rate": 4.450402144772118e-05, |
| "loss": 0.4306, |
| "step": 495 |
| }, |
| { |
| "epoch": 0.5983112183353438, |
| "grad_norm": 0.4453966068547487, |
| "learning_rate": 4.44816800714924e-05, |
| "loss": 0.4574, |
| "step": 496 |
| }, |
| { |
| "epoch": 0.5995174909529554, |
| "grad_norm": 0.5804778387940887, |
| "learning_rate": 4.445933869526363e-05, |
| "loss": 0.4456, |
| "step": 497 |
| }, |
| { |
| "epoch": 0.6007237635705669, |
| "grad_norm": 0.513573961932933, |
| "learning_rate": 4.443699731903486e-05, |
| "loss": 0.43, |
| "step": 498 |
| }, |
| { |
| "epoch": 0.6019300361881785, |
| "grad_norm": 0.5536446946147545, |
| "learning_rate": 4.441465594280608e-05, |
| "loss": 0.4433, |
| "step": 499 |
| }, |
| { |
| "epoch": 0.6031363088057901, |
| "grad_norm": 0.422156053238679, |
| "learning_rate": 4.4392314566577306e-05, |
| "loss": 0.427, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.6043425814234017, |
| "grad_norm": 0.3977480968975122, |
| "learning_rate": 4.436997319034853e-05, |
| "loss": 0.4286, |
| "step": 501 |
| }, |
| { |
| "epoch": 0.6055488540410132, |
| "grad_norm": 0.38578431608091396, |
| "learning_rate": 4.434763181411975e-05, |
| "loss": 0.4397, |
| "step": 502 |
| }, |
| { |
| "epoch": 0.6067551266586249, |
| "grad_norm": 0.4371318250540829, |
| "learning_rate": 4.4325290437890975e-05, |
| "loss": 0.4497, |
| "step": 503 |
| }, |
| { |
| "epoch": 0.6079613992762364, |
| "grad_norm": 0.38750843806678476, |
| "learning_rate": 4.43029490616622e-05, |
| "loss": 0.4515, |
| "step": 504 |
| }, |
| { |
| "epoch": 0.609167671893848, |
| "grad_norm": 0.3184771182337021, |
| "learning_rate": 4.428060768543342e-05, |
| "loss": 0.4226, |
| "step": 505 |
| }, |
| { |
| "epoch": 0.6103739445114595, |
| "grad_norm": 0.3349430507766991, |
| "learning_rate": 4.425826630920465e-05, |
| "loss": 0.4412, |
| "step": 506 |
| }, |
| { |
| "epoch": 0.6115802171290712, |
| "grad_norm": 0.460700030502988, |
| "learning_rate": 4.423592493297588e-05, |
| "loss": 0.4191, |
| "step": 507 |
| }, |
| { |
| "epoch": 0.6127864897466827, |
| "grad_norm": 0.3474525155837308, |
| "learning_rate": 4.42135835567471e-05, |
| "loss": 0.4433, |
| "step": 508 |
| }, |
| { |
| "epoch": 0.6139927623642943, |
| "grad_norm": 0.3890384095888057, |
| "learning_rate": 4.419124218051832e-05, |
| "loss": 0.4471, |
| "step": 509 |
| }, |
| { |
| "epoch": 0.6151990349819059, |
| "grad_norm": 0.3473241563303528, |
| "learning_rate": 4.416890080428955e-05, |
| "loss": 0.4333, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.6164053075995175, |
| "grad_norm": 0.38649792348349676, |
| "learning_rate": 4.414655942806077e-05, |
| "loss": 0.4401, |
| "step": 511 |
| }, |
| { |
| "epoch": 0.617611580217129, |
| "grad_norm": 0.390326168737818, |
| "learning_rate": 4.4124218051831995e-05, |
| "loss": 0.4415, |
| "step": 512 |
| }, |
| { |
| "epoch": 0.6188178528347407, |
| "grad_norm": 0.36517910787407803, |
| "learning_rate": 4.410187667560322e-05, |
| "loss": 0.4161, |
| "step": 513 |
| }, |
| { |
| "epoch": 0.6200241254523522, |
| "grad_norm": 0.3871202882793126, |
| "learning_rate": 4.4079535299374443e-05, |
| "loss": 0.4388, |
| "step": 514 |
| }, |
| { |
| "epoch": 0.6212303980699638, |
| "grad_norm": 0.3606332726485659, |
| "learning_rate": 4.405719392314567e-05, |
| "loss": 0.4411, |
| "step": 515 |
| }, |
| { |
| "epoch": 0.6224366706875754, |
| "grad_norm": 0.437394776038846, |
| "learning_rate": 4.403485254691689e-05, |
| "loss": 0.4565, |
| "step": 516 |
| }, |
| { |
| "epoch": 0.623642943305187, |
| "grad_norm": 0.4130632114314605, |
| "learning_rate": 4.401251117068811e-05, |
| "loss": 0.4666, |
| "step": 517 |
| }, |
| { |
| "epoch": 0.6248492159227985, |
| "grad_norm": 0.6077447752036588, |
| "learning_rate": 4.399016979445934e-05, |
| "loss": 0.4219, |
| "step": 518 |
| }, |
| { |
| "epoch": 0.6260554885404102, |
| "grad_norm": 0.48865858289065855, |
| "learning_rate": 4.396782841823057e-05, |
| "loss": 0.4525, |
| "step": 519 |
| }, |
| { |
| "epoch": 0.6272617611580217, |
| "grad_norm": 0.37378858727980474, |
| "learning_rate": 4.394548704200179e-05, |
| "loss": 0.4154, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.6284680337756333, |
| "grad_norm": 0.4731113479746855, |
| "learning_rate": 4.3923145665773015e-05, |
| "loss": 0.4294, |
| "step": 521 |
| }, |
| { |
| "epoch": 0.6296743063932448, |
| "grad_norm": 0.4090362259372264, |
| "learning_rate": 4.390080428954424e-05, |
| "loss": 0.4314, |
| "step": 522 |
| }, |
| { |
| "epoch": 0.6308805790108565, |
| "grad_norm": 0.3220703974636813, |
| "learning_rate": 4.387846291331546e-05, |
| "loss": 0.4306, |
| "step": 523 |
| }, |
| { |
| "epoch": 0.632086851628468, |
| "grad_norm": 0.42173924202764596, |
| "learning_rate": 4.3856121537086684e-05, |
| "loss": 0.4351, |
| "step": 524 |
| }, |
| { |
| "epoch": 0.6332931242460796, |
| "grad_norm": 0.3330928257528643, |
| "learning_rate": 4.383378016085791e-05, |
| "loss": 0.4303, |
| "step": 525 |
| }, |
| { |
| "epoch": 0.6344993968636912, |
| "grad_norm": 0.41161949162644296, |
| "learning_rate": 4.381143878462913e-05, |
| "loss": 0.4348, |
| "step": 526 |
| }, |
| { |
| "epoch": 0.6357056694813028, |
| "grad_norm": 0.3883545439321418, |
| "learning_rate": 4.378909740840036e-05, |
| "loss": 0.4369, |
| "step": 527 |
| }, |
| { |
| "epoch": 0.6369119420989143, |
| "grad_norm": 0.4325113358235531, |
| "learning_rate": 4.376675603217159e-05, |
| "loss": 0.4532, |
| "step": 528 |
| }, |
| { |
| "epoch": 0.638118214716526, |
| "grad_norm": 0.4380697726970751, |
| "learning_rate": 4.374441465594281e-05, |
| "loss": 0.4643, |
| "step": 529 |
| }, |
| { |
| "epoch": 0.6393244873341375, |
| "grad_norm": 0.35916390367525086, |
| "learning_rate": 4.372207327971403e-05, |
| "loss": 0.4171, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.6405307599517491, |
| "grad_norm": 0.3486136621622154, |
| "learning_rate": 4.3699731903485256e-05, |
| "loss": 0.4488, |
| "step": 531 |
| }, |
| { |
| "epoch": 0.6417370325693607, |
| "grad_norm": 0.3312426151787181, |
| "learning_rate": 4.367739052725648e-05, |
| "loss": 0.4407, |
| "step": 532 |
| }, |
| { |
| "epoch": 0.6429433051869723, |
| "grad_norm": 0.4132145377973402, |
| "learning_rate": 4.3655049151027704e-05, |
| "loss": 0.4334, |
| "step": 533 |
| }, |
| { |
| "epoch": 0.6441495778045838, |
| "grad_norm": 0.34330718379196123, |
| "learning_rate": 4.363270777479893e-05, |
| "loss": 0.445, |
| "step": 534 |
| }, |
| { |
| "epoch": 0.6453558504221955, |
| "grad_norm": 0.45135766155768786, |
| "learning_rate": 4.361036639857015e-05, |
| "loss": 0.4507, |
| "step": 535 |
| }, |
| { |
| "epoch": 0.646562123039807, |
| "grad_norm": 0.35589892838561626, |
| "learning_rate": 4.358802502234138e-05, |
| "loss": 0.4202, |
| "step": 536 |
| }, |
| { |
| "epoch": 0.6477683956574186, |
| "grad_norm": 1.1341629551979147, |
| "learning_rate": 4.35656836461126e-05, |
| "loss": 0.4487, |
| "step": 537 |
| }, |
| { |
| "epoch": 0.6489746682750301, |
| "grad_norm": 0.3743916061859571, |
| "learning_rate": 4.354334226988382e-05, |
| "loss": 0.4502, |
| "step": 538 |
| }, |
| { |
| "epoch": 0.6501809408926418, |
| "grad_norm": 0.3701078540192121, |
| "learning_rate": 4.352100089365505e-05, |
| "loss": 0.4353, |
| "step": 539 |
| }, |
| { |
| "epoch": 0.6513872135102533, |
| "grad_norm": 0.37975867227218413, |
| "learning_rate": 4.3498659517426276e-05, |
| "loss": 0.4604, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.6525934861278649, |
| "grad_norm": 0.45996926881333117, |
| "learning_rate": 4.34763181411975e-05, |
| "loss": 0.4381, |
| "step": 541 |
| }, |
| { |
| "epoch": 0.6537997587454765, |
| "grad_norm": 0.3529432931676143, |
| "learning_rate": 4.3453976764968725e-05, |
| "loss": 0.4469, |
| "step": 542 |
| }, |
| { |
| "epoch": 0.6550060313630881, |
| "grad_norm": 0.4637421891155913, |
| "learning_rate": 4.343163538873995e-05, |
| "loss": 0.4436, |
| "step": 543 |
| }, |
| { |
| "epoch": 0.6562123039806996, |
| "grad_norm": 0.44641761278318115, |
| "learning_rate": 4.340929401251117e-05, |
| "loss": 0.4237, |
| "step": 544 |
| }, |
| { |
| "epoch": 0.6574185765983113, |
| "grad_norm": 0.45548634034253993, |
| "learning_rate": 4.3386952636282394e-05, |
| "loss": 0.451, |
| "step": 545 |
| }, |
| { |
| "epoch": 0.6586248492159228, |
| "grad_norm": 0.6095224416117992, |
| "learning_rate": 4.336461126005362e-05, |
| "loss": 0.4234, |
| "step": 546 |
| }, |
| { |
| "epoch": 0.6598311218335344, |
| "grad_norm": 0.4794907978183171, |
| "learning_rate": 4.334226988382484e-05, |
| "loss": 0.4321, |
| "step": 547 |
| }, |
| { |
| "epoch": 0.661037394451146, |
| "grad_norm": 0.4372188988863073, |
| "learning_rate": 4.331992850759607e-05, |
| "loss": 0.4291, |
| "step": 548 |
| }, |
| { |
| "epoch": 0.6622436670687576, |
| "grad_norm": 0.41945601478116296, |
| "learning_rate": 4.32975871313673e-05, |
| "loss": 0.4205, |
| "step": 549 |
| }, |
| { |
| "epoch": 0.6634499396863691, |
| "grad_norm": 0.5243682305557889, |
| "learning_rate": 4.327524575513852e-05, |
| "loss": 0.4416, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.6646562123039808, |
| "grad_norm": 0.4801155246465036, |
| "learning_rate": 4.3252904378909745e-05, |
| "loss": 0.4462, |
| "step": 551 |
| }, |
| { |
| "epoch": 0.6658624849215923, |
| "grad_norm": 0.42217526503844, |
| "learning_rate": 4.3230563002680966e-05, |
| "loss": 0.4252, |
| "step": 552 |
| }, |
| { |
| "epoch": 0.6670687575392038, |
| "grad_norm": 0.4769157452972345, |
| "learning_rate": 4.3208221626452186e-05, |
| "loss": 0.4277, |
| "step": 553 |
| }, |
| { |
| "epoch": 0.6682750301568154, |
| "grad_norm": 0.4446751167304439, |
| "learning_rate": 4.3185880250223414e-05, |
| "loss": 0.4029, |
| "step": 554 |
| }, |
| { |
| "epoch": 0.669481302774427, |
| "grad_norm": 0.44617281388579455, |
| "learning_rate": 4.316353887399464e-05, |
| "loss": 0.4358, |
| "step": 555 |
| }, |
| { |
| "epoch": 0.6706875753920386, |
| "grad_norm": 0.5947589007882237, |
| "learning_rate": 4.314119749776586e-05, |
| "loss": 0.4505, |
| "step": 556 |
| }, |
| { |
| "epoch": 0.6718938480096501, |
| "grad_norm": 0.3674033271102033, |
| "learning_rate": 4.311885612153709e-05, |
| "loss": 0.4437, |
| "step": 557 |
| }, |
| { |
| "epoch": 0.6731001206272618, |
| "grad_norm": 0.4400826256131888, |
| "learning_rate": 4.309651474530832e-05, |
| "loss": 0.4301, |
| "step": 558 |
| }, |
| { |
| "epoch": 0.6743063932448733, |
| "grad_norm": 0.43015064156020477, |
| "learning_rate": 4.307417336907954e-05, |
| "loss": 0.4334, |
| "step": 559 |
| }, |
| { |
| "epoch": 0.6755126658624849, |
| "grad_norm": 0.3782245475032066, |
| "learning_rate": 4.305183199285076e-05, |
| "loss": 0.419, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.6767189384800965, |
| "grad_norm": 0.4066237562131077, |
| "learning_rate": 4.3029490616621986e-05, |
| "loss": 0.4356, |
| "step": 561 |
| }, |
| { |
| "epoch": 0.6779252110977081, |
| "grad_norm": 0.4187960770131329, |
| "learning_rate": 4.300714924039321e-05, |
| "loss": 0.4289, |
| "step": 562 |
| }, |
| { |
| "epoch": 0.6791314837153196, |
| "grad_norm": 0.35579617743327024, |
| "learning_rate": 4.2984807864164434e-05, |
| "loss": 0.4575, |
| "step": 563 |
| }, |
| { |
| "epoch": 0.6803377563329313, |
| "grad_norm": 0.4409858128600069, |
| "learning_rate": 4.296246648793566e-05, |
| "loss": 0.4633, |
| "step": 564 |
| }, |
| { |
| "epoch": 0.6815440289505428, |
| "grad_norm": 0.3354907808913072, |
| "learning_rate": 4.294012511170689e-05, |
| "loss": 0.4333, |
| "step": 565 |
| }, |
| { |
| "epoch": 0.6827503015681544, |
| "grad_norm": 0.42655397239204723, |
| "learning_rate": 4.291778373547811e-05, |
| "loss": 0.4428, |
| "step": 566 |
| }, |
| { |
| "epoch": 0.6839565741857659, |
| "grad_norm": 0.443037805064068, |
| "learning_rate": 4.289544235924933e-05, |
| "loss": 0.4451, |
| "step": 567 |
| }, |
| { |
| "epoch": 0.6851628468033776, |
| "grad_norm": 0.8914794928360468, |
| "learning_rate": 4.287310098302056e-05, |
| "loss": 0.4396, |
| "step": 568 |
| }, |
| { |
| "epoch": 0.6863691194209891, |
| "grad_norm": 0.5448067106591934, |
| "learning_rate": 4.285075960679178e-05, |
| "loss": 0.461, |
| "step": 569 |
| }, |
| { |
| "epoch": 0.6875753920386007, |
| "grad_norm": 0.42014703815134213, |
| "learning_rate": 4.2828418230563006e-05, |
| "loss": 0.4165, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.6887816646562123, |
| "grad_norm": 0.5363032079607244, |
| "learning_rate": 4.280607685433423e-05, |
| "loss": 0.4481, |
| "step": 571 |
| }, |
| { |
| "epoch": 0.6899879372738239, |
| "grad_norm": 0.6788458284685935, |
| "learning_rate": 4.2783735478105454e-05, |
| "loss": 0.4332, |
| "step": 572 |
| }, |
| { |
| "epoch": 0.6911942098914354, |
| "grad_norm": 0.40379510471482416, |
| "learning_rate": 4.276139410187668e-05, |
| "loss": 0.4439, |
| "step": 573 |
| }, |
| { |
| "epoch": 0.6924004825090471, |
| "grad_norm": 0.6130371034513505, |
| "learning_rate": 4.27390527256479e-05, |
| "loss": 0.4377, |
| "step": 574 |
| }, |
| { |
| "epoch": 0.6936067551266586, |
| "grad_norm": 0.3627939971877631, |
| "learning_rate": 4.271671134941912e-05, |
| "loss": 0.4285, |
| "step": 575 |
| }, |
| { |
| "epoch": 0.6948130277442702, |
| "grad_norm": 0.5305583762077702, |
| "learning_rate": 4.269436997319035e-05, |
| "loss": 0.4201, |
| "step": 576 |
| }, |
| { |
| "epoch": 0.6960193003618818, |
| "grad_norm": 0.510353267102675, |
| "learning_rate": 4.267202859696158e-05, |
| "loss": 0.4132, |
| "step": 577 |
| }, |
| { |
| "epoch": 0.6972255729794934, |
| "grad_norm": 0.5113841119221939, |
| "learning_rate": 4.26496872207328e-05, |
| "loss": 0.4317, |
| "step": 578 |
| }, |
| { |
| "epoch": 0.6984318455971049, |
| "grad_norm": 0.6169504699258387, |
| "learning_rate": 4.2627345844504026e-05, |
| "loss": 0.4364, |
| "step": 579 |
| }, |
| { |
| "epoch": 0.6996381182147166, |
| "grad_norm": 0.5050461237759128, |
| "learning_rate": 4.2605004468275254e-05, |
| "loss": 0.4434, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.7008443908323281, |
| "grad_norm": 0.595300192798241, |
| "learning_rate": 4.258266309204647e-05, |
| "loss": 0.4702, |
| "step": 581 |
| }, |
| { |
| "epoch": 0.7020506634499397, |
| "grad_norm": 0.38965873875054796, |
| "learning_rate": 4.2560321715817695e-05, |
| "loss": 0.4472, |
| "step": 582 |
| }, |
| { |
| "epoch": 0.7032569360675512, |
| "grad_norm": 0.5644938928003483, |
| "learning_rate": 4.253798033958892e-05, |
| "loss": 0.4268, |
| "step": 583 |
| }, |
| { |
| "epoch": 0.7044632086851629, |
| "grad_norm": 0.33911633262614643, |
| "learning_rate": 4.251563896336014e-05, |
| "loss": 0.4366, |
| "step": 584 |
| }, |
| { |
| "epoch": 0.7056694813027744, |
| "grad_norm": 0.5326007243448592, |
| "learning_rate": 4.249329758713137e-05, |
| "loss": 0.455, |
| "step": 585 |
| }, |
| { |
| "epoch": 0.706875753920386, |
| "grad_norm": 0.4721907283458342, |
| "learning_rate": 4.24709562109026e-05, |
| "loss": 0.4214, |
| "step": 586 |
| }, |
| { |
| "epoch": 0.7080820265379976, |
| "grad_norm": 0.4312507346368042, |
| "learning_rate": 4.244861483467382e-05, |
| "loss": 0.444, |
| "step": 587 |
| }, |
| { |
| "epoch": 0.7092882991556092, |
| "grad_norm": 0.4884922993522299, |
| "learning_rate": 4.242627345844504e-05, |
| "loss": 0.4205, |
| "step": 588 |
| }, |
| { |
| "epoch": 0.7104945717732207, |
| "grad_norm": 0.3242841502058886, |
| "learning_rate": 4.240393208221627e-05, |
| "loss": 0.4188, |
| "step": 589 |
| }, |
| { |
| "epoch": 0.7117008443908324, |
| "grad_norm": 0.529266738309419, |
| "learning_rate": 4.238159070598749e-05, |
| "loss": 0.4284, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.7129071170084439, |
| "grad_norm": 0.31078521850095026, |
| "learning_rate": 4.2359249329758715e-05, |
| "loss": 0.4401, |
| "step": 591 |
| }, |
| { |
| "epoch": 0.7141133896260555, |
| "grad_norm": 0.42313591630009106, |
| "learning_rate": 4.233690795352994e-05, |
| "loss": 0.439, |
| "step": 592 |
| }, |
| { |
| "epoch": 0.715319662243667, |
| "grad_norm": 0.3458759202241377, |
| "learning_rate": 4.231456657730116e-05, |
| "loss": 0.432, |
| "step": 593 |
| }, |
| { |
| "epoch": 0.7165259348612787, |
| "grad_norm": 0.3856562354076321, |
| "learning_rate": 4.229222520107239e-05, |
| "loss": 0.4359, |
| "step": 594 |
| }, |
| { |
| "epoch": 0.7177322074788902, |
| "grad_norm": 0.4325118137990474, |
| "learning_rate": 4.226988382484361e-05, |
| "loss": 0.428, |
| "step": 595 |
| }, |
| { |
| "epoch": 0.7189384800965019, |
| "grad_norm": 0.36937544795868116, |
| "learning_rate": 4.224754244861483e-05, |
| "loss": 0.4378, |
| "step": 596 |
| }, |
| { |
| "epoch": 0.7201447527141134, |
| "grad_norm": 0.36827610206035255, |
| "learning_rate": 4.222520107238606e-05, |
| "loss": 0.4367, |
| "step": 597 |
| }, |
| { |
| "epoch": 0.721351025331725, |
| "grad_norm": 0.3412919037234244, |
| "learning_rate": 4.220285969615729e-05, |
| "loss": 0.4428, |
| "step": 598 |
| }, |
| { |
| "epoch": 0.7225572979493365, |
| "grad_norm": 0.4013821463345791, |
| "learning_rate": 4.218051831992851e-05, |
| "loss": 0.435, |
| "step": 599 |
| }, |
| { |
| "epoch": 0.7237635705669482, |
| "grad_norm": 0.35671033183339573, |
| "learning_rate": 4.2158176943699735e-05, |
| "loss": 0.4589, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.7249698431845597, |
| "grad_norm": 0.41435093321906774, |
| "learning_rate": 4.213583556747096e-05, |
| "loss": 0.4463, |
| "step": 601 |
| }, |
| { |
| "epoch": 0.7261761158021713, |
| "grad_norm": 0.4102815510753044, |
| "learning_rate": 4.2113494191242184e-05, |
| "loss": 0.402, |
| "step": 602 |
| }, |
| { |
| "epoch": 0.7273823884197829, |
| "grad_norm": 0.40787860182142316, |
| "learning_rate": 4.2091152815013404e-05, |
| "loss": 0.4426, |
| "step": 603 |
| }, |
| { |
| "epoch": 0.7285886610373945, |
| "grad_norm": 0.35075907908441134, |
| "learning_rate": 4.206881143878463e-05, |
| "loss": 0.4432, |
| "step": 604 |
| }, |
| { |
| "epoch": 0.729794933655006, |
| "grad_norm": 0.3765858326922549, |
| "learning_rate": 4.204647006255585e-05, |
| "loss": 0.4256, |
| "step": 605 |
| }, |
| { |
| "epoch": 0.7310012062726177, |
| "grad_norm": 0.3662765996267215, |
| "learning_rate": 4.202412868632708e-05, |
| "loss": 0.4441, |
| "step": 606 |
| }, |
| { |
| "epoch": 0.7322074788902292, |
| "grad_norm": 0.3993830815115119, |
| "learning_rate": 4.200178731009831e-05, |
| "loss": 0.4585, |
| "step": 607 |
| }, |
| { |
| "epoch": 0.7334137515078407, |
| "grad_norm": 0.43628851383001205, |
| "learning_rate": 4.197944593386953e-05, |
| "loss": 0.4616, |
| "step": 608 |
| }, |
| { |
| "epoch": 0.7346200241254524, |
| "grad_norm": 0.31815655209223725, |
| "learning_rate": 4.1957104557640756e-05, |
| "loss": 0.4101, |
| "step": 609 |
| }, |
| { |
| "epoch": 0.7358262967430639, |
| "grad_norm": 0.3673715240625822, |
| "learning_rate": 4.1934763181411976e-05, |
| "loss": 0.4284, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.7370325693606755, |
| "grad_norm": 0.3683279900141664, |
| "learning_rate": 4.19124218051832e-05, |
| "loss": 0.4392, |
| "step": 611 |
| }, |
| { |
| "epoch": 0.738238841978287, |
| "grad_norm": 0.3619292606667547, |
| "learning_rate": 4.1890080428954424e-05, |
| "loss": 0.4297, |
| "step": 612 |
| }, |
| { |
| "epoch": 0.7394451145958987, |
| "grad_norm": 0.35361671306430015, |
| "learning_rate": 4.186773905272565e-05, |
| "loss": 0.4434, |
| "step": 613 |
| }, |
| { |
| "epoch": 0.7406513872135102, |
| "grad_norm": 0.31710675852694437, |
| "learning_rate": 4.184539767649687e-05, |
| "loss": 0.4461, |
| "step": 614 |
| }, |
| { |
| "epoch": 0.7418576598311218, |
| "grad_norm": 0.35186947742683367, |
| "learning_rate": 4.18230563002681e-05, |
| "loss": 0.4279, |
| "step": 615 |
| }, |
| { |
| "epoch": 0.7430639324487334, |
| "grad_norm": 0.3445887427896891, |
| "learning_rate": 4.180071492403933e-05, |
| "loss": 0.4273, |
| "step": 616 |
| }, |
| { |
| "epoch": 0.744270205066345, |
| "grad_norm": 0.3681697377341119, |
| "learning_rate": 4.177837354781055e-05, |
| "loss": 0.4477, |
| "step": 617 |
| }, |
| { |
| "epoch": 0.7454764776839565, |
| "grad_norm": 0.38596304651196306, |
| "learning_rate": 4.175603217158177e-05, |
| "loss": 0.4548, |
| "step": 618 |
| }, |
| { |
| "epoch": 0.7466827503015682, |
| "grad_norm": 0.33378736186801616, |
| "learning_rate": 4.1733690795352996e-05, |
| "loss": 0.439, |
| "step": 619 |
| }, |
| { |
| "epoch": 0.7478890229191797, |
| "grad_norm": 0.41780476815690276, |
| "learning_rate": 4.171134941912422e-05, |
| "loss": 0.4344, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.7490952955367913, |
| "grad_norm": 0.3404196374099104, |
| "learning_rate": 4.1689008042895445e-05, |
| "loss": 0.4204, |
| "step": 621 |
| }, |
| { |
| "epoch": 0.7503015681544029, |
| "grad_norm": 0.3631500695840166, |
| "learning_rate": 4.166666666666667e-05, |
| "loss": 0.4368, |
| "step": 622 |
| }, |
| { |
| "epoch": 0.7515078407720145, |
| "grad_norm": 0.33649523812124915, |
| "learning_rate": 4.164432529043789e-05, |
| "loss": 0.4411, |
| "step": 623 |
| }, |
| { |
| "epoch": 0.752714113389626, |
| "grad_norm": 0.4023001151383432, |
| "learning_rate": 4.162198391420912e-05, |
| "loss": 0.4318, |
| "step": 624 |
| }, |
| { |
| "epoch": 0.7539203860072377, |
| "grad_norm": 0.3827957756912793, |
| "learning_rate": 4.159964253798034e-05, |
| "loss": 0.4177, |
| "step": 625 |
| }, |
| { |
| "epoch": 0.7551266586248492, |
| "grad_norm": 0.39090196736454064, |
| "learning_rate": 4.157730116175156e-05, |
| "loss": 0.4113, |
| "step": 626 |
| }, |
| { |
| "epoch": 0.7563329312424608, |
| "grad_norm": 0.33944855823417563, |
| "learning_rate": 4.155495978552279e-05, |
| "loss": 0.4229, |
| "step": 627 |
| }, |
| { |
| "epoch": 0.7575392038600723, |
| "grad_norm": 0.4417958155266135, |
| "learning_rate": 4.1532618409294017e-05, |
| "loss": 0.4383, |
| "step": 628 |
| }, |
| { |
| "epoch": 0.758745476477684, |
| "grad_norm": 0.5006482946012983, |
| "learning_rate": 4.151027703306524e-05, |
| "loss": 0.426, |
| "step": 629 |
| }, |
| { |
| "epoch": 0.7599517490952955, |
| "grad_norm": 0.32717617902623447, |
| "learning_rate": 4.1487935656836465e-05, |
| "loss": 0.4379, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.7611580217129071, |
| "grad_norm": 0.45592424124753594, |
| "learning_rate": 4.146559428060769e-05, |
| "loss": 0.4236, |
| "step": 631 |
| }, |
| { |
| "epoch": 0.7623642943305187, |
| "grad_norm": 0.347387273411557, |
| "learning_rate": 4.144325290437891e-05, |
| "loss": 0.4124, |
| "step": 632 |
| }, |
| { |
| "epoch": 0.7635705669481303, |
| "grad_norm": 0.3721402905982405, |
| "learning_rate": 4.1420911528150134e-05, |
| "loss": 0.411, |
| "step": 633 |
| }, |
| { |
| "epoch": 0.7647768395657418, |
| "grad_norm": 0.3970352649019345, |
| "learning_rate": 4.139857015192136e-05, |
| "loss": 0.4579, |
| "step": 634 |
| }, |
| { |
| "epoch": 0.7659831121833535, |
| "grad_norm": 0.43579423143308493, |
| "learning_rate": 4.137622877569258e-05, |
| "loss": 0.4296, |
| "step": 635 |
| }, |
| { |
| "epoch": 0.767189384800965, |
| "grad_norm": 0.39279465007605335, |
| "learning_rate": 4.135388739946381e-05, |
| "loss": 0.4231, |
| "step": 636 |
| }, |
| { |
| "epoch": 0.7683956574185766, |
| "grad_norm": 0.39264419792414523, |
| "learning_rate": 4.133154602323504e-05, |
| "loss": 0.4496, |
| "step": 637 |
| }, |
| { |
| "epoch": 0.7696019300361882, |
| "grad_norm": 0.37493841849128356, |
| "learning_rate": 4.130920464700626e-05, |
| "loss": 0.4291, |
| "step": 638 |
| }, |
| { |
| "epoch": 0.7708082026537998, |
| "grad_norm": 0.3116259448118478, |
| "learning_rate": 4.1286863270777485e-05, |
| "loss": 0.4065, |
| "step": 639 |
| }, |
| { |
| "epoch": 0.7720144752714113, |
| "grad_norm": 0.34983397918562, |
| "learning_rate": 4.1264521894548706e-05, |
| "loss": 0.4344, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.773220747889023, |
| "grad_norm": 0.37624317148252306, |
| "learning_rate": 4.1242180518319926e-05, |
| "loss": 0.4294, |
| "step": 641 |
| }, |
| { |
| "epoch": 0.7744270205066345, |
| "grad_norm": 0.35520939634197213, |
| "learning_rate": 4.1219839142091154e-05, |
| "loss": 0.4148, |
| "step": 642 |
| }, |
| { |
| "epoch": 0.7756332931242461, |
| "grad_norm": 0.4209679722778236, |
| "learning_rate": 4.119749776586238e-05, |
| "loss": 0.4405, |
| "step": 643 |
| }, |
| { |
| "epoch": 0.7768395657418576, |
| "grad_norm": 0.37647621537087084, |
| "learning_rate": 4.11751563896336e-05, |
| "loss": 0.4428, |
| "step": 644 |
| }, |
| { |
| "epoch": 0.7780458383594693, |
| "grad_norm": 0.444753555144684, |
| "learning_rate": 4.115281501340483e-05, |
| "loss": 0.4147, |
| "step": 645 |
| }, |
| { |
| "epoch": 0.7792521109770808, |
| "grad_norm": 0.38638377205425406, |
| "learning_rate": 4.113047363717605e-05, |
| "loss": 0.4444, |
| "step": 646 |
| }, |
| { |
| "epoch": 0.7804583835946924, |
| "grad_norm": 0.3403369626116202, |
| "learning_rate": 4.110813226094727e-05, |
| "loss": 0.4094, |
| "step": 647 |
| }, |
| { |
| "epoch": 0.781664656212304, |
| "grad_norm": 0.4152796308986888, |
| "learning_rate": 4.10857908847185e-05, |
| "loss": 0.4184, |
| "step": 648 |
| }, |
| { |
| "epoch": 0.7828709288299156, |
| "grad_norm": 0.42659585360095775, |
| "learning_rate": 4.1063449508489726e-05, |
| "loss": 0.4376, |
| "step": 649 |
| }, |
| { |
| "epoch": 0.7840772014475271, |
| "grad_norm": 0.3878286833998237, |
| "learning_rate": 4.1041108132260947e-05, |
| "loss": 0.4424, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.7852834740651388, |
| "grad_norm": 0.4279745826656998, |
| "learning_rate": 4.1018766756032174e-05, |
| "loss": 0.4171, |
| "step": 651 |
| }, |
| { |
| "epoch": 0.7864897466827503, |
| "grad_norm": 0.4037837710960183, |
| "learning_rate": 4.09964253798034e-05, |
| "loss": 0.4209, |
| "step": 652 |
| }, |
| { |
| "epoch": 0.7876960193003619, |
| "grad_norm": 0.328778160496694, |
| "learning_rate": 4.097408400357462e-05, |
| "loss": 0.4161, |
| "step": 653 |
| }, |
| { |
| "epoch": 0.7889022919179735, |
| "grad_norm": 0.5091749659268239, |
| "learning_rate": 4.095174262734584e-05, |
| "loss": 0.4059, |
| "step": 654 |
| }, |
| { |
| "epoch": 0.7901085645355851, |
| "grad_norm": 0.38575372225419235, |
| "learning_rate": 4.092940125111707e-05, |
| "loss": 0.4306, |
| "step": 655 |
| }, |
| { |
| "epoch": 0.7913148371531966, |
| "grad_norm": 0.3564858997238752, |
| "learning_rate": 4.090705987488829e-05, |
| "loss": 0.4229, |
| "step": 656 |
| }, |
| { |
| "epoch": 0.7925211097708083, |
| "grad_norm": 0.33263580029482265, |
| "learning_rate": 4.088471849865952e-05, |
| "loss": 0.4238, |
| "step": 657 |
| }, |
| { |
| "epoch": 0.7937273823884198, |
| "grad_norm": 0.3770972817311997, |
| "learning_rate": 4.0862377122430746e-05, |
| "loss": 0.4215, |
| "step": 658 |
| }, |
| { |
| "epoch": 0.7949336550060314, |
| "grad_norm": 0.34460138647484384, |
| "learning_rate": 4.084003574620197e-05, |
| "loss": 0.4624, |
| "step": 659 |
| }, |
| { |
| "epoch": 0.7961399276236429, |
| "grad_norm": 0.39795136433340583, |
| "learning_rate": 4.0817694369973194e-05, |
| "loss": 0.4717, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.7973462002412546, |
| "grad_norm": 0.307065657764241, |
| "learning_rate": 4.0795352993744415e-05, |
| "loss": 0.4085, |
| "step": 661 |
| }, |
| { |
| "epoch": 0.7985524728588661, |
| "grad_norm": 0.36134507736793015, |
| "learning_rate": 4.0773011617515636e-05, |
| "loss": 0.4253, |
| "step": 662 |
| }, |
| { |
| "epoch": 0.7997587454764777, |
| "grad_norm": 0.36337250256856457, |
| "learning_rate": 4.075067024128686e-05, |
| "loss": 0.423, |
| "step": 663 |
| }, |
| { |
| "epoch": 0.8009650180940893, |
| "grad_norm": 0.34792061661231566, |
| "learning_rate": 4.072832886505809e-05, |
| "loss": 0.4099, |
| "step": 664 |
| }, |
| { |
| "epoch": 0.8021712907117008, |
| "grad_norm": 0.32356302620167704, |
| "learning_rate": 4.070598748882931e-05, |
| "loss": 0.4252, |
| "step": 665 |
| }, |
| { |
| "epoch": 0.8033775633293124, |
| "grad_norm": 0.32657888669961704, |
| "learning_rate": 4.068364611260054e-05, |
| "loss": 0.4373, |
| "step": 666 |
| }, |
| { |
| "epoch": 0.804583835946924, |
| "grad_norm": 0.37539157769448306, |
| "learning_rate": 4.0661304736371766e-05, |
| "loss": 0.4399, |
| "step": 667 |
| }, |
| { |
| "epoch": 0.8057901085645356, |
| "grad_norm": 0.3311339803296136, |
| "learning_rate": 4.063896336014299e-05, |
| "loss": 0.436, |
| "step": 668 |
| }, |
| { |
| "epoch": 0.8069963811821471, |
| "grad_norm": 0.36972556713265975, |
| "learning_rate": 4.061662198391421e-05, |
| "loss": 0.432, |
| "step": 669 |
| }, |
| { |
| "epoch": 0.8082026537997588, |
| "grad_norm": 0.37032736900349966, |
| "learning_rate": 4.0594280607685435e-05, |
| "loss": 0.4483, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.8094089264173703, |
| "grad_norm": 0.35889492771597925, |
| "learning_rate": 4.0571939231456656e-05, |
| "loss": 0.4384, |
| "step": 671 |
| }, |
| { |
| "epoch": 0.8106151990349819, |
| "grad_norm": 0.3917827657498273, |
| "learning_rate": 4.054959785522788e-05, |
| "loss": 0.4586, |
| "step": 672 |
| }, |
| { |
| "epoch": 0.8118214716525934, |
| "grad_norm": 0.3449677493546546, |
| "learning_rate": 4.052725647899911e-05, |
| "loss": 0.4515, |
| "step": 673 |
| }, |
| { |
| "epoch": 0.8130277442702051, |
| "grad_norm": 0.3675384445750401, |
| "learning_rate": 4.050491510277033e-05, |
| "loss": 0.4085, |
| "step": 674 |
| }, |
| { |
| "epoch": 0.8142340168878166, |
| "grad_norm": 0.314056093770354, |
| "learning_rate": 4.048257372654156e-05, |
| "loss": 0.4145, |
| "step": 675 |
| }, |
| { |
| "epoch": 0.8154402895054282, |
| "grad_norm": 0.2884839297871985, |
| "learning_rate": 4.046023235031278e-05, |
| "loss": 0.4085, |
| "step": 676 |
| }, |
| { |
| "epoch": 0.8166465621230398, |
| "grad_norm": 0.3419757025514419, |
| "learning_rate": 4.0437890974084e-05, |
| "loss": 0.4372, |
| "step": 677 |
| }, |
| { |
| "epoch": 0.8178528347406514, |
| "grad_norm": 0.3350898764681184, |
| "learning_rate": 4.041554959785523e-05, |
| "loss": 0.4232, |
| "step": 678 |
| }, |
| { |
| "epoch": 0.8190591073582629, |
| "grad_norm": 0.3101794969989681, |
| "learning_rate": 4.0393208221626455e-05, |
| "loss": 0.4118, |
| "step": 679 |
| }, |
| { |
| "epoch": 0.8202653799758746, |
| "grad_norm": 0.34632856979523563, |
| "learning_rate": 4.0370866845397676e-05, |
| "loss": 0.3988, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.8214716525934861, |
| "grad_norm": 0.3539539495668904, |
| "learning_rate": 4.0348525469168903e-05, |
| "loss": 0.4244, |
| "step": 681 |
| }, |
| { |
| "epoch": 0.8226779252110977, |
| "grad_norm": 0.31397960297819166, |
| "learning_rate": 4.032618409294013e-05, |
| "loss": 0.4191, |
| "step": 682 |
| }, |
| { |
| "epoch": 0.8238841978287093, |
| "grad_norm": 0.4450760042397345, |
| "learning_rate": 4.030384271671135e-05, |
| "loss": 0.4417, |
| "step": 683 |
| }, |
| { |
| "epoch": 0.8250904704463209, |
| "grad_norm": 0.3758257266264463, |
| "learning_rate": 4.028150134048257e-05, |
| "loss": 0.4326, |
| "step": 684 |
| }, |
| { |
| "epoch": 0.8262967430639324, |
| "grad_norm": 0.4347027419099405, |
| "learning_rate": 4.02591599642538e-05, |
| "loss": 0.432, |
| "step": 685 |
| }, |
| { |
| "epoch": 0.827503015681544, |
| "grad_norm": 0.33281647369934925, |
| "learning_rate": 4.023681858802503e-05, |
| "loss": 0.4443, |
| "step": 686 |
| }, |
| { |
| "epoch": 0.8287092882991556, |
| "grad_norm": 0.44210413889252337, |
| "learning_rate": 4.021447721179625e-05, |
| "loss": 0.4365, |
| "step": 687 |
| }, |
| { |
| "epoch": 0.8299155609167672, |
| "grad_norm": 0.36379774369825985, |
| "learning_rate": 4.0192135835567475e-05, |
| "loss": 0.4407, |
| "step": 688 |
| }, |
| { |
| "epoch": 0.8311218335343787, |
| "grad_norm": 0.3409375216448993, |
| "learning_rate": 4.01697944593387e-05, |
| "loss": 0.4163, |
| "step": 689 |
| }, |
| { |
| "epoch": 0.8323281061519904, |
| "grad_norm": 0.4407071439643426, |
| "learning_rate": 4.0147453083109924e-05, |
| "loss": 0.4289, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.8335343787696019, |
| "grad_norm": 0.3440826546937714, |
| "learning_rate": 4.0125111706881144e-05, |
| "loss": 0.4262, |
| "step": 691 |
| }, |
| { |
| "epoch": 0.8347406513872135, |
| "grad_norm": 0.3415814685987661, |
| "learning_rate": 4.010277033065237e-05, |
| "loss": 0.4203, |
| "step": 692 |
| }, |
| { |
| "epoch": 0.8359469240048251, |
| "grad_norm": 0.3950448918022183, |
| "learning_rate": 4.008042895442359e-05, |
| "loss": 0.4552, |
| "step": 693 |
| }, |
| { |
| "epoch": 0.8371531966224367, |
| "grad_norm": 0.3682136713582369, |
| "learning_rate": 4.005808757819482e-05, |
| "loss": 0.4293, |
| "step": 694 |
| }, |
| { |
| "epoch": 0.8383594692400482, |
| "grad_norm": 0.3245379694872947, |
| "learning_rate": 4.003574620196605e-05, |
| "loss": 0.4466, |
| "step": 695 |
| }, |
| { |
| "epoch": 0.8395657418576599, |
| "grad_norm": 0.4043111926095754, |
| "learning_rate": 4.001340482573727e-05, |
| "loss": 0.4124, |
| "step": 696 |
| }, |
| { |
| "epoch": 0.8407720144752714, |
| "grad_norm": 0.3676847847505625, |
| "learning_rate": 3.9991063449508496e-05, |
| "loss": 0.4386, |
| "step": 697 |
| }, |
| { |
| "epoch": 0.841978287092883, |
| "grad_norm": 0.38345621376583905, |
| "learning_rate": 3.9968722073279716e-05, |
| "loss": 0.4206, |
| "step": 698 |
| }, |
| { |
| "epoch": 0.8431845597104946, |
| "grad_norm": 0.3973129732421497, |
| "learning_rate": 3.994638069705094e-05, |
| "loss": 0.444, |
| "step": 699 |
| }, |
| { |
| "epoch": 0.8443908323281062, |
| "grad_norm": 0.40507692158099656, |
| "learning_rate": 3.9924039320822164e-05, |
| "loss": 0.4292, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.8455971049457177, |
| "grad_norm": 0.44304365309322413, |
| "learning_rate": 3.990169794459339e-05, |
| "loss": 0.4351, |
| "step": 701 |
| }, |
| { |
| "epoch": 0.8468033775633294, |
| "grad_norm": 0.34455473505953255, |
| "learning_rate": 3.987935656836461e-05, |
| "loss": 0.4377, |
| "step": 702 |
| }, |
| { |
| "epoch": 0.8480096501809409, |
| "grad_norm": 0.41711458301265714, |
| "learning_rate": 3.985701519213584e-05, |
| "loss": 0.432, |
| "step": 703 |
| }, |
| { |
| "epoch": 0.8492159227985525, |
| "grad_norm": 0.3930062754782553, |
| "learning_rate": 3.983467381590707e-05, |
| "loss": 0.4352, |
| "step": 704 |
| }, |
| { |
| "epoch": 0.850422195416164, |
| "grad_norm": 0.45827478708504854, |
| "learning_rate": 3.981233243967828e-05, |
| "loss": 0.4356, |
| "step": 705 |
| }, |
| { |
| "epoch": 0.8516284680337757, |
| "grad_norm": 0.4100191953270183, |
| "learning_rate": 3.978999106344951e-05, |
| "loss": 0.4113, |
| "step": 706 |
| }, |
| { |
| "epoch": 0.8528347406513872, |
| "grad_norm": 0.3674509170919512, |
| "learning_rate": 3.9767649687220737e-05, |
| "loss": 0.4135, |
| "step": 707 |
| }, |
| { |
| "epoch": 0.8540410132689988, |
| "grad_norm": 0.4479376512445783, |
| "learning_rate": 3.974530831099196e-05, |
| "loss": 0.4278, |
| "step": 708 |
| }, |
| { |
| "epoch": 0.8552472858866104, |
| "grad_norm": 0.38078667965255886, |
| "learning_rate": 3.9722966934763185e-05, |
| "loss": 0.4362, |
| "step": 709 |
| }, |
| { |
| "epoch": 0.856453558504222, |
| "grad_norm": 0.3795248071192456, |
| "learning_rate": 3.970062555853441e-05, |
| "loss": 0.4256, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.8576598311218335, |
| "grad_norm": 0.4452777512730255, |
| "learning_rate": 3.967828418230563e-05, |
| "loss": 0.4048, |
| "step": 711 |
| }, |
| { |
| "epoch": 0.8588661037394452, |
| "grad_norm": 0.4039038032056285, |
| "learning_rate": 3.9655942806076854e-05, |
| "loss": 0.4305, |
| "step": 712 |
| }, |
| { |
| "epoch": 0.8600723763570567, |
| "grad_norm": 0.38705436215447936, |
| "learning_rate": 3.963360142984808e-05, |
| "loss": 0.4338, |
| "step": 713 |
| }, |
| { |
| "epoch": 0.8612786489746683, |
| "grad_norm": 0.37612795284145023, |
| "learning_rate": 3.96112600536193e-05, |
| "loss": 0.4356, |
| "step": 714 |
| }, |
| { |
| "epoch": 0.8624849215922799, |
| "grad_norm": 0.4027892381276643, |
| "learning_rate": 3.958891867739053e-05, |
| "loss": 0.4259, |
| "step": 715 |
| }, |
| { |
| "epoch": 0.8636911942098915, |
| "grad_norm": 0.4474291942673631, |
| "learning_rate": 3.956657730116176e-05, |
| "loss": 0.4191, |
| "step": 716 |
| }, |
| { |
| "epoch": 0.864897466827503, |
| "grad_norm": 0.37294989078633567, |
| "learning_rate": 3.954423592493298e-05, |
| "loss": 0.4421, |
| "step": 717 |
| }, |
| { |
| "epoch": 0.8661037394451147, |
| "grad_norm": 0.39823462537023213, |
| "learning_rate": 3.9521894548704205e-05, |
| "loss": 0.4038, |
| "step": 718 |
| }, |
| { |
| "epoch": 0.8673100120627262, |
| "grad_norm": 0.3165892994057179, |
| "learning_rate": 3.9499553172475426e-05, |
| "loss": 0.4276, |
| "step": 719 |
| }, |
| { |
| "epoch": 0.8685162846803377, |
| "grad_norm": 0.3217081991040394, |
| "learning_rate": 3.9477211796246646e-05, |
| "loss": 0.4421, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.8697225572979493, |
| "grad_norm": 0.34234595334126144, |
| "learning_rate": 3.9454870420017874e-05, |
| "loss": 0.4153, |
| "step": 721 |
| }, |
| { |
| "epoch": 0.8709288299155609, |
| "grad_norm": 0.3012747513317574, |
| "learning_rate": 3.94325290437891e-05, |
| "loss": 0.431, |
| "step": 722 |
| }, |
| { |
| "epoch": 0.8721351025331725, |
| "grad_norm": 0.4426572348450103, |
| "learning_rate": 3.941018766756032e-05, |
| "loss": 0.4614, |
| "step": 723 |
| }, |
| { |
| "epoch": 0.873341375150784, |
| "grad_norm": 0.30865825529588037, |
| "learning_rate": 3.938784629133155e-05, |
| "loss": 0.4376, |
| "step": 724 |
| }, |
| { |
| "epoch": 0.8745476477683957, |
| "grad_norm": 0.3804165775048893, |
| "learning_rate": 3.936550491510278e-05, |
| "loss": 0.4242, |
| "step": 725 |
| }, |
| { |
| "epoch": 0.8757539203860072, |
| "grad_norm": 0.3590587327120939, |
| "learning_rate": 3.9343163538874e-05, |
| "loss": 0.4247, |
| "step": 726 |
| }, |
| { |
| "epoch": 0.8769601930036188, |
| "grad_norm": 0.38885826359929504, |
| "learning_rate": 3.932082216264522e-05, |
| "loss": 0.4548, |
| "step": 727 |
| }, |
| { |
| "epoch": 0.8781664656212304, |
| "grad_norm": 0.34482562337158273, |
| "learning_rate": 3.9298480786416446e-05, |
| "loss": 0.4311, |
| "step": 728 |
| }, |
| { |
| "epoch": 0.879372738238842, |
| "grad_norm": 0.4135444476948955, |
| "learning_rate": 3.9276139410187666e-05, |
| "loss": 0.4328, |
| "step": 729 |
| }, |
| { |
| "epoch": 0.8805790108564535, |
| "grad_norm": 0.3520471335612592, |
| "learning_rate": 3.9253798033958894e-05, |
| "loss": 0.4166, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.8817852834740652, |
| "grad_norm": 0.4012911512212805, |
| "learning_rate": 3.923145665773012e-05, |
| "loss": 0.4189, |
| "step": 731 |
| }, |
| { |
| "epoch": 0.8829915560916767, |
| "grad_norm": 0.38055005376337286, |
| "learning_rate": 3.920911528150134e-05, |
| "loss": 0.4343, |
| "step": 732 |
| }, |
| { |
| "epoch": 0.8841978287092883, |
| "grad_norm": 0.3525565527349451, |
| "learning_rate": 3.918677390527257e-05, |
| "loss": 0.4211, |
| "step": 733 |
| }, |
| { |
| "epoch": 0.8854041013268998, |
| "grad_norm": 0.44952656074215147, |
| "learning_rate": 3.916443252904379e-05, |
| "loss": 0.4247, |
| "step": 734 |
| }, |
| { |
| "epoch": 0.8866103739445115, |
| "grad_norm": 0.38931178479387923, |
| "learning_rate": 3.914209115281501e-05, |
| "loss": 0.4091, |
| "step": 735 |
| }, |
| { |
| "epoch": 0.887816646562123, |
| "grad_norm": 0.4295833832365165, |
| "learning_rate": 3.911974977658624e-05, |
| "loss": 0.4273, |
| "step": 736 |
| }, |
| { |
| "epoch": 0.8890229191797346, |
| "grad_norm": 0.418788573846933, |
| "learning_rate": 3.9097408400357466e-05, |
| "loss": 0.4324, |
| "step": 737 |
| }, |
| { |
| "epoch": 0.8902291917973462, |
| "grad_norm": 0.4279678016240271, |
| "learning_rate": 3.907506702412869e-05, |
| "loss": 0.4369, |
| "step": 738 |
| }, |
| { |
| "epoch": 0.8914354644149578, |
| "grad_norm": 0.4706663367413349, |
| "learning_rate": 3.9052725647899914e-05, |
| "loss": 0.4425, |
| "step": 739 |
| }, |
| { |
| "epoch": 0.8926417370325693, |
| "grad_norm": 0.33644996594988985, |
| "learning_rate": 3.903038427167114e-05, |
| "loss": 0.4333, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.893848009650181, |
| "grad_norm": 0.5219711361177203, |
| "learning_rate": 3.900804289544236e-05, |
| "loss": 0.4139, |
| "step": 741 |
| }, |
| { |
| "epoch": 0.8950542822677925, |
| "grad_norm": 0.3655070920705575, |
| "learning_rate": 3.898570151921358e-05, |
| "loss": 0.4215, |
| "step": 742 |
| }, |
| { |
| "epoch": 0.8962605548854041, |
| "grad_norm": 0.4620182845103301, |
| "learning_rate": 3.896336014298481e-05, |
| "loss": 0.4391, |
| "step": 743 |
| }, |
| { |
| "epoch": 0.8974668275030157, |
| "grad_norm": 0.446013501499081, |
| "learning_rate": 3.894101876675603e-05, |
| "loss": 0.4113, |
| "step": 744 |
| }, |
| { |
| "epoch": 0.8986731001206273, |
| "grad_norm": 0.34459110535041027, |
| "learning_rate": 3.891867739052726e-05, |
| "loss": 0.418, |
| "step": 745 |
| }, |
| { |
| "epoch": 0.8998793727382388, |
| "grad_norm": 0.39907797574459497, |
| "learning_rate": 3.8896336014298486e-05, |
| "loss": 0.4245, |
| "step": 746 |
| }, |
| { |
| "epoch": 0.9010856453558505, |
| "grad_norm": 0.3664286457363056, |
| "learning_rate": 3.887399463806971e-05, |
| "loss": 0.4253, |
| "step": 747 |
| }, |
| { |
| "epoch": 0.902291917973462, |
| "grad_norm": 0.41961271222361207, |
| "learning_rate": 3.8851653261840934e-05, |
| "loss": 0.422, |
| "step": 748 |
| }, |
| { |
| "epoch": 0.9034981905910736, |
| "grad_norm": 0.34696641892186714, |
| "learning_rate": 3.8829311885612155e-05, |
| "loss": 0.4262, |
| "step": 749 |
| }, |
| { |
| "epoch": 0.9047044632086851, |
| "grad_norm": 0.3805847464393101, |
| "learning_rate": 3.8806970509383376e-05, |
| "loss": 0.4222, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.9059107358262968, |
| "grad_norm": 0.3628814065269647, |
| "learning_rate": 3.87846291331546e-05, |
| "loss": 0.415, |
| "step": 751 |
| }, |
| { |
| "epoch": 0.9071170084439083, |
| "grad_norm": 0.4155607336726821, |
| "learning_rate": 3.876228775692583e-05, |
| "loss": 0.4529, |
| "step": 752 |
| }, |
| { |
| "epoch": 0.9083232810615199, |
| "grad_norm": 0.3792089287803448, |
| "learning_rate": 3.873994638069705e-05, |
| "loss": 0.4427, |
| "step": 753 |
| }, |
| { |
| "epoch": 0.9095295536791315, |
| "grad_norm": 0.31130821301505, |
| "learning_rate": 3.871760500446828e-05, |
| "loss": 0.4065, |
| "step": 754 |
| }, |
| { |
| "epoch": 0.9107358262967431, |
| "grad_norm": 0.3129142676327276, |
| "learning_rate": 3.8695263628239506e-05, |
| "loss": 0.4089, |
| "step": 755 |
| }, |
| { |
| "epoch": 0.9119420989143546, |
| "grad_norm": 0.3903036615010061, |
| "learning_rate": 3.867292225201073e-05, |
| "loss": 0.4222, |
| "step": 756 |
| }, |
| { |
| "epoch": 0.9131483715319663, |
| "grad_norm": 0.3578101284227395, |
| "learning_rate": 3.865058087578195e-05, |
| "loss": 0.4048, |
| "step": 757 |
| }, |
| { |
| "epoch": 0.9143546441495778, |
| "grad_norm": 0.40198410528206696, |
| "learning_rate": 3.8628239499553175e-05, |
| "loss": 0.4247, |
| "step": 758 |
| }, |
| { |
| "epoch": 0.9155609167671894, |
| "grad_norm": 0.469693215846757, |
| "learning_rate": 3.8605898123324396e-05, |
| "loss": 0.4214, |
| "step": 759 |
| }, |
| { |
| "epoch": 0.916767189384801, |
| "grad_norm": 0.3485330736681551, |
| "learning_rate": 3.858355674709562e-05, |
| "loss": 0.4394, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.9179734620024126, |
| "grad_norm": 0.5510670824668625, |
| "learning_rate": 3.856121537086685e-05, |
| "loss": 0.4265, |
| "step": 761 |
| }, |
| { |
| "epoch": 0.9191797346200241, |
| "grad_norm": 0.39837727033428083, |
| "learning_rate": 3.853887399463807e-05, |
| "loss": 0.418, |
| "step": 762 |
| }, |
| { |
| "epoch": 0.9203860072376358, |
| "grad_norm": 0.41864197863604735, |
| "learning_rate": 3.851653261840929e-05, |
| "loss": 0.4319, |
| "step": 763 |
| }, |
| { |
| "epoch": 0.9215922798552473, |
| "grad_norm": 0.4574033736259981, |
| "learning_rate": 3.849419124218052e-05, |
| "loss": 0.4292, |
| "step": 764 |
| }, |
| { |
| "epoch": 0.9227985524728589, |
| "grad_norm": 0.3213110702684741, |
| "learning_rate": 3.847184986595174e-05, |
| "loss": 0.4098, |
| "step": 765 |
| }, |
| { |
| "epoch": 0.9240048250904704, |
| "grad_norm": 0.4707116003780227, |
| "learning_rate": 3.844950848972297e-05, |
| "loss": 0.4244, |
| "step": 766 |
| }, |
| { |
| "epoch": 0.9252110977080821, |
| "grad_norm": 0.4830169788101642, |
| "learning_rate": 3.8427167113494195e-05, |
| "loss": 0.4157, |
| "step": 767 |
| }, |
| { |
| "epoch": 0.9264173703256936, |
| "grad_norm": 0.4560897134292833, |
| "learning_rate": 3.8404825737265416e-05, |
| "loss": 0.4393, |
| "step": 768 |
| }, |
| { |
| "epoch": 0.9276236429433052, |
| "grad_norm": 0.6482326714956513, |
| "learning_rate": 3.8382484361036644e-05, |
| "loss": 0.4185, |
| "step": 769 |
| }, |
| { |
| "epoch": 0.9288299155609168, |
| "grad_norm": 0.4137410345743272, |
| "learning_rate": 3.8360142984807864e-05, |
| "loss": 0.4452, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.9300361881785284, |
| "grad_norm": 0.6822256387321779, |
| "learning_rate": 3.8337801608579085e-05, |
| "loss": 0.4293, |
| "step": 771 |
| }, |
| { |
| "epoch": 0.9312424607961399, |
| "grad_norm": 0.30603838243949, |
| "learning_rate": 3.831546023235031e-05, |
| "loss": 0.4346, |
| "step": 772 |
| }, |
| { |
| "epoch": 0.9324487334137516, |
| "grad_norm": 0.5898745156575074, |
| "learning_rate": 3.829311885612154e-05, |
| "loss": 0.4231, |
| "step": 773 |
| }, |
| { |
| "epoch": 0.9336550060313631, |
| "grad_norm": 0.33761968440146145, |
| "learning_rate": 3.827077747989276e-05, |
| "loss": 0.4153, |
| "step": 774 |
| }, |
| { |
| "epoch": 0.9348612786489746, |
| "grad_norm": 0.49516978453532084, |
| "learning_rate": 3.824843610366399e-05, |
| "loss": 0.4038, |
| "step": 775 |
| }, |
| { |
| "epoch": 0.9360675512665863, |
| "grad_norm": 0.4513687887601463, |
| "learning_rate": 3.8226094727435216e-05, |
| "loss": 0.4322, |
| "step": 776 |
| }, |
| { |
| "epoch": 0.9372738238841978, |
| "grad_norm": 0.4062297835365191, |
| "learning_rate": 3.8203753351206436e-05, |
| "loss": 0.4186, |
| "step": 777 |
| }, |
| { |
| "epoch": 0.9384800965018094, |
| "grad_norm": 0.48900573314200335, |
| "learning_rate": 3.818141197497766e-05, |
| "loss": 0.4166, |
| "step": 778 |
| }, |
| { |
| "epoch": 0.9396863691194209, |
| "grad_norm": 0.35020953986328074, |
| "learning_rate": 3.8159070598748884e-05, |
| "loss": 0.4244, |
| "step": 779 |
| }, |
| { |
| "epoch": 0.9408926417370326, |
| "grad_norm": 0.3670307965989599, |
| "learning_rate": 3.8136729222520105e-05, |
| "loss": 0.4629, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.9420989143546441, |
| "grad_norm": 1.247256002324837, |
| "learning_rate": 3.811438784629133e-05, |
| "loss": 0.441, |
| "step": 781 |
| }, |
| { |
| "epoch": 0.9433051869722557, |
| "grad_norm": 0.3886395489272093, |
| "learning_rate": 3.809204647006256e-05, |
| "loss": 0.4312, |
| "step": 782 |
| }, |
| { |
| "epoch": 0.9445114595898673, |
| "grad_norm": 0.3762586952639499, |
| "learning_rate": 3.806970509383378e-05, |
| "loss": 0.429, |
| "step": 783 |
| }, |
| { |
| "epoch": 0.9457177322074789, |
| "grad_norm": 0.35565515533530456, |
| "learning_rate": 3.804736371760501e-05, |
| "loss": 0.4255, |
| "step": 784 |
| }, |
| { |
| "epoch": 0.9469240048250904, |
| "grad_norm": 0.33572714507490936, |
| "learning_rate": 3.802502234137623e-05, |
| "loss": 0.4103, |
| "step": 785 |
| }, |
| { |
| "epoch": 0.9481302774427021, |
| "grad_norm": 0.4421855761325749, |
| "learning_rate": 3.800268096514745e-05, |
| "loss": 0.4156, |
| "step": 786 |
| }, |
| { |
| "epoch": 0.9493365500603136, |
| "grad_norm": 0.3481572570009158, |
| "learning_rate": 3.798033958891868e-05, |
| "loss": 0.4246, |
| "step": 787 |
| }, |
| { |
| "epoch": 0.9505428226779252, |
| "grad_norm": 0.5149461446668178, |
| "learning_rate": 3.7957998212689905e-05, |
| "loss": 0.4447, |
| "step": 788 |
| }, |
| { |
| "epoch": 0.9517490952955368, |
| "grad_norm": 0.32657057689577534, |
| "learning_rate": 3.7935656836461125e-05, |
| "loss": 0.4223, |
| "step": 789 |
| }, |
| { |
| "epoch": 0.9529553679131484, |
| "grad_norm": 0.440663289717245, |
| "learning_rate": 3.791331546023235e-05, |
| "loss": 0.4075, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.9541616405307599, |
| "grad_norm": 0.49488905419238877, |
| "learning_rate": 3.789097408400358e-05, |
| "loss": 0.4131, |
| "step": 791 |
| }, |
| { |
| "epoch": 0.9553679131483716, |
| "grad_norm": 0.3722447138794308, |
| "learning_rate": 3.78686327077748e-05, |
| "loss": 0.4196, |
| "step": 792 |
| }, |
| { |
| "epoch": 0.9565741857659831, |
| "grad_norm": 0.46844136877295967, |
| "learning_rate": 3.784629133154602e-05, |
| "loss": 0.3922, |
| "step": 793 |
| }, |
| { |
| "epoch": 0.9577804583835947, |
| "grad_norm": 0.3240774092465061, |
| "learning_rate": 3.782394995531725e-05, |
| "loss": 0.4453, |
| "step": 794 |
| }, |
| { |
| "epoch": 0.9589867310012062, |
| "grad_norm": 0.4267932674577182, |
| "learning_rate": 3.780160857908847e-05, |
| "loss": 0.4405, |
| "step": 795 |
| }, |
| { |
| "epoch": 0.9601930036188179, |
| "grad_norm": 0.3801824902130055, |
| "learning_rate": 3.77792672028597e-05, |
| "loss": 0.4295, |
| "step": 796 |
| }, |
| { |
| "epoch": 0.9613992762364294, |
| "grad_norm": 0.3892559150634845, |
| "learning_rate": 3.7756925826630925e-05, |
| "loss": 0.41, |
| "step": 797 |
| }, |
| { |
| "epoch": 0.962605548854041, |
| "grad_norm": 0.4667236925840342, |
| "learning_rate": 3.7734584450402145e-05, |
| "loss": 0.4374, |
| "step": 798 |
| }, |
| { |
| "epoch": 0.9638118214716526, |
| "grad_norm": 0.4352607128892435, |
| "learning_rate": 3.771224307417337e-05, |
| "loss": 0.4471, |
| "step": 799 |
| }, |
| { |
| "epoch": 0.9650180940892642, |
| "grad_norm": 0.3900629779626532, |
| "learning_rate": 3.7689901697944594e-05, |
| "loss": 0.4073, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.9662243667068757, |
| "grad_norm": 0.43402977782983276, |
| "learning_rate": 3.7667560321715814e-05, |
| "loss": 0.4211, |
| "step": 801 |
| }, |
| { |
| "epoch": 0.9674306393244874, |
| "grad_norm": 0.3672704556853505, |
| "learning_rate": 3.764521894548704e-05, |
| "loss": 0.4289, |
| "step": 802 |
| }, |
| { |
| "epoch": 0.9686369119420989, |
| "grad_norm": 0.4648133025117742, |
| "learning_rate": 3.762287756925827e-05, |
| "loss": 0.4105, |
| "step": 803 |
| }, |
| { |
| "epoch": 0.9698431845597105, |
| "grad_norm": 0.43049540796677793, |
| "learning_rate": 3.760053619302949e-05, |
| "loss": 0.4462, |
| "step": 804 |
| }, |
| { |
| "epoch": 0.971049457177322, |
| "grad_norm": 0.4007241405451893, |
| "learning_rate": 3.757819481680072e-05, |
| "loss": 0.4373, |
| "step": 805 |
| }, |
| { |
| "epoch": 0.9722557297949337, |
| "grad_norm": 0.5109239018044767, |
| "learning_rate": 3.7555853440571945e-05, |
| "loss": 0.4517, |
| "step": 806 |
| }, |
| { |
| "epoch": 0.9734620024125452, |
| "grad_norm": 0.3488495344511992, |
| "learning_rate": 3.7533512064343166e-05, |
| "loss": 0.4318, |
| "step": 807 |
| }, |
| { |
| "epoch": 0.9746682750301568, |
| "grad_norm": 0.40087038818480636, |
| "learning_rate": 3.7511170688114386e-05, |
| "loss": 0.4323, |
| "step": 808 |
| }, |
| { |
| "epoch": 0.9758745476477684, |
| "grad_norm": 0.42484548812497636, |
| "learning_rate": 3.7488829311885614e-05, |
| "loss": 0.4263, |
| "step": 809 |
| }, |
| { |
| "epoch": 0.97708082026538, |
| "grad_norm": 0.4605312280800588, |
| "learning_rate": 3.746648793565684e-05, |
| "loss": 0.4176, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.9782870928829915, |
| "grad_norm": 0.3833013635577786, |
| "learning_rate": 3.744414655942806e-05, |
| "loss": 0.4191, |
| "step": 811 |
| }, |
| { |
| "epoch": 0.9794933655006032, |
| "grad_norm": 0.5426098231658908, |
| "learning_rate": 3.742180518319929e-05, |
| "loss": 0.4177, |
| "step": 812 |
| }, |
| { |
| "epoch": 0.9806996381182147, |
| "grad_norm": 0.3580235511736533, |
| "learning_rate": 3.739946380697052e-05, |
| "loss": 0.419, |
| "step": 813 |
| }, |
| { |
| "epoch": 0.9819059107358263, |
| "grad_norm": 0.5434773850238956, |
| "learning_rate": 3.737712243074174e-05, |
| "loss": 0.4046, |
| "step": 814 |
| }, |
| { |
| "epoch": 0.9831121833534379, |
| "grad_norm": 0.42850270847902283, |
| "learning_rate": 3.735478105451296e-05, |
| "loss": 0.4463, |
| "step": 815 |
| }, |
| { |
| "epoch": 0.9843184559710495, |
| "grad_norm": 0.47719801180992827, |
| "learning_rate": 3.7332439678284186e-05, |
| "loss": 0.4071, |
| "step": 816 |
| }, |
| { |
| "epoch": 0.985524728588661, |
| "grad_norm": 0.5226031258479484, |
| "learning_rate": 3.7310098302055407e-05, |
| "loss": 0.439, |
| "step": 817 |
| }, |
| { |
| "epoch": 0.9867310012062727, |
| "grad_norm": 0.431981443960303, |
| "learning_rate": 3.7287756925826634e-05, |
| "loss": 0.4213, |
| "step": 818 |
| }, |
| { |
| "epoch": 0.9879372738238842, |
| "grad_norm": 0.494267544185392, |
| "learning_rate": 3.726541554959786e-05, |
| "loss": 0.4159, |
| "step": 819 |
| }, |
| { |
| "epoch": 0.9891435464414958, |
| "grad_norm": 0.4521890760596943, |
| "learning_rate": 3.724307417336908e-05, |
| "loss": 0.4219, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.9903498190591074, |
| "grad_norm": 0.41115842360041144, |
| "learning_rate": 3.722073279714031e-05, |
| "loss": 0.4344, |
| "step": 821 |
| }, |
| { |
| "epoch": 0.991556091676719, |
| "grad_norm": 0.44776288282241156, |
| "learning_rate": 3.719839142091153e-05, |
| "loss": 0.4192, |
| "step": 822 |
| }, |
| { |
| "epoch": 0.9927623642943305, |
| "grad_norm": 0.3978722176491912, |
| "learning_rate": 3.717605004468275e-05, |
| "loss": 0.413, |
| "step": 823 |
| }, |
| { |
| "epoch": 0.9939686369119421, |
| "grad_norm": 0.5012546257517893, |
| "learning_rate": 3.715370866845398e-05, |
| "loss": 0.4323, |
| "step": 824 |
| }, |
| { |
| "epoch": 0.9951749095295537, |
| "grad_norm": 0.43828941736087085, |
| "learning_rate": 3.7131367292225206e-05, |
| "loss": 0.4219, |
| "step": 825 |
| }, |
| { |
| "epoch": 0.9963811821471653, |
| "grad_norm": 0.432905255507051, |
| "learning_rate": 3.710902591599643e-05, |
| "loss": 0.4263, |
| "step": 826 |
| }, |
| { |
| "epoch": 0.9975874547647768, |
| "grad_norm": 0.3960220006036349, |
| "learning_rate": 3.7086684539767654e-05, |
| "loss": 0.4311, |
| "step": 827 |
| }, |
| { |
| "epoch": 0.9987937273823885, |
| "grad_norm": 0.3711890664614277, |
| "learning_rate": 3.7064343163538875e-05, |
| "loss": 0.4081, |
| "step": 828 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.31977027269476055, |
| "learning_rate": 3.7042001787310096e-05, |
| "loss": 0.4128, |
| "step": 829 |
| }, |
| { |
| "epoch": 1.0012062726176116, |
| "grad_norm": 0.405378808755483, |
| "learning_rate": 3.701966041108132e-05, |
| "loss": 0.3679, |
| "step": 830 |
| }, |
| { |
| "epoch": 1.002412545235223, |
| "grad_norm": 0.316147075545926, |
| "learning_rate": 3.699731903485255e-05, |
| "loss": 0.3622, |
| "step": 831 |
| }, |
| { |
| "epoch": 1.0036188178528347, |
| "grad_norm": 0.33536369086770434, |
| "learning_rate": 3.697497765862377e-05, |
| "loss": 0.3609, |
| "step": 832 |
| }, |
| { |
| "epoch": 1.0048250904704463, |
| "grad_norm": 0.3543589357049451, |
| "learning_rate": 3.6952636282395e-05, |
| "loss": 0.3658, |
| "step": 833 |
| }, |
| { |
| "epoch": 1.006031363088058, |
| "grad_norm": 0.35093876986866807, |
| "learning_rate": 3.6930294906166226e-05, |
| "loss": 0.3741, |
| "step": 834 |
| }, |
| { |
| "epoch": 1.0072376357056694, |
| "grad_norm": 0.40714167335636686, |
| "learning_rate": 3.690795352993745e-05, |
| "loss": 0.3576, |
| "step": 835 |
| }, |
| { |
| "epoch": 1.008443908323281, |
| "grad_norm": 0.36075988738168036, |
| "learning_rate": 3.688561215370867e-05, |
| "loss": 0.3676, |
| "step": 836 |
| }, |
| { |
| "epoch": 1.0096501809408926, |
| "grad_norm": 0.45662381259214857, |
| "learning_rate": 3.6863270777479895e-05, |
| "loss": 0.367, |
| "step": 837 |
| }, |
| { |
| "epoch": 1.0108564535585043, |
| "grad_norm": 0.3389555200863269, |
| "learning_rate": 3.6840929401251116e-05, |
| "loss": 0.3571, |
| "step": 838 |
| }, |
| { |
| "epoch": 1.0120627261761157, |
| "grad_norm": 0.41115199270011005, |
| "learning_rate": 3.681858802502234e-05, |
| "loss": 0.3715, |
| "step": 839 |
| }, |
| { |
| "epoch": 1.0132689987937273, |
| "grad_norm": 0.3689682514541236, |
| "learning_rate": 3.679624664879357e-05, |
| "loss": 0.3679, |
| "step": 840 |
| }, |
| { |
| "epoch": 1.014475271411339, |
| "grad_norm": 0.4023860133335284, |
| "learning_rate": 3.677390527256479e-05, |
| "loss": 0.3477, |
| "step": 841 |
| }, |
| { |
| "epoch": 1.0156815440289506, |
| "grad_norm": 0.322524044360211, |
| "learning_rate": 3.675156389633602e-05, |
| "loss": 0.3385, |
| "step": 842 |
| }, |
| { |
| "epoch": 1.016887816646562, |
| "grad_norm": 0.45344836998974947, |
| "learning_rate": 3.672922252010724e-05, |
| "loss": 0.3761, |
| "step": 843 |
| }, |
| { |
| "epoch": 1.0180940892641737, |
| "grad_norm": 0.34647812053404026, |
| "learning_rate": 3.670688114387846e-05, |
| "loss": 0.3493, |
| "step": 844 |
| }, |
| { |
| "epoch": 1.0193003618817853, |
| "grad_norm": 0.38238665236951674, |
| "learning_rate": 3.668453976764969e-05, |
| "loss": 0.3842, |
| "step": 845 |
| }, |
| { |
| "epoch": 1.020506634499397, |
| "grad_norm": 0.39750349528942325, |
| "learning_rate": 3.6662198391420915e-05, |
| "loss": 0.3548, |
| "step": 846 |
| }, |
| { |
| "epoch": 1.0217129071170084, |
| "grad_norm": 0.352018046564739, |
| "learning_rate": 3.6639857015192136e-05, |
| "loss": 0.3615, |
| "step": 847 |
| }, |
| { |
| "epoch": 1.02291917973462, |
| "grad_norm": 0.399580936497818, |
| "learning_rate": 3.6617515638963363e-05, |
| "loss": 0.362, |
| "step": 848 |
| }, |
| { |
| "epoch": 1.0241254523522316, |
| "grad_norm": 0.33323256118835304, |
| "learning_rate": 3.659517426273459e-05, |
| "loss": 0.3675, |
| "step": 849 |
| }, |
| { |
| "epoch": 1.0253317249698433, |
| "grad_norm": 0.39773698978442645, |
| "learning_rate": 3.657283288650581e-05, |
| "loss": 0.3561, |
| "step": 850 |
| }, |
| { |
| "epoch": 1.0265379975874547, |
| "grad_norm": 0.43166150787321717, |
| "learning_rate": 3.655049151027703e-05, |
| "loss": 0.3526, |
| "step": 851 |
| }, |
| { |
| "epoch": 1.0277442702050663, |
| "grad_norm": 0.2904670204346489, |
| "learning_rate": 3.652815013404826e-05, |
| "loss": 0.3703, |
| "step": 852 |
| }, |
| { |
| "epoch": 1.028950542822678, |
| "grad_norm": 0.36983080433044474, |
| "learning_rate": 3.650580875781948e-05, |
| "loss": 0.3624, |
| "step": 853 |
| }, |
| { |
| "epoch": 1.0301568154402896, |
| "grad_norm": 0.34943281477018856, |
| "learning_rate": 3.648346738159071e-05, |
| "loss": 0.3705, |
| "step": 854 |
| }, |
| { |
| "epoch": 1.031363088057901, |
| "grad_norm": 0.3090520555256778, |
| "learning_rate": 3.6461126005361935e-05, |
| "loss": 0.3849, |
| "step": 855 |
| }, |
| { |
| "epoch": 1.0325693606755126, |
| "grad_norm": 0.3792763380912474, |
| "learning_rate": 3.6438784629133156e-05, |
| "loss": 0.343, |
| "step": 856 |
| }, |
| { |
| "epoch": 1.0337756332931243, |
| "grad_norm": 0.3358613723632487, |
| "learning_rate": 3.6416443252904384e-05, |
| "loss": 0.342, |
| "step": 857 |
| }, |
| { |
| "epoch": 1.034981905910736, |
| "grad_norm": 0.31597184846375864, |
| "learning_rate": 3.6394101876675604e-05, |
| "loss": 0.3928, |
| "step": 858 |
| }, |
| { |
| "epoch": 1.0361881785283473, |
| "grad_norm": 0.39698670274757913, |
| "learning_rate": 3.6371760500446825e-05, |
| "loss": 0.3701, |
| "step": 859 |
| }, |
| { |
| "epoch": 1.037394451145959, |
| "grad_norm": 0.3193347171316655, |
| "learning_rate": 3.634941912421805e-05, |
| "loss": 0.3583, |
| "step": 860 |
| }, |
| { |
| "epoch": 1.0386007237635706, |
| "grad_norm": 0.32500721830435475, |
| "learning_rate": 3.632707774798928e-05, |
| "loss": 0.3774, |
| "step": 861 |
| }, |
| { |
| "epoch": 1.0398069963811822, |
| "grad_norm": 0.3097308325536179, |
| "learning_rate": 3.63047363717605e-05, |
| "loss": 0.3527, |
| "step": 862 |
| }, |
| { |
| "epoch": 1.0410132689987937, |
| "grad_norm": 0.30675170250681827, |
| "learning_rate": 3.628239499553173e-05, |
| "loss": 0.3693, |
| "step": 863 |
| }, |
| { |
| "epoch": 1.0422195416164053, |
| "grad_norm": 0.2940557941988533, |
| "learning_rate": 3.6260053619302956e-05, |
| "loss": 0.3662, |
| "step": 864 |
| }, |
| { |
| "epoch": 1.043425814234017, |
| "grad_norm": 0.32259841695392805, |
| "learning_rate": 3.6237712243074176e-05, |
| "loss": 0.3791, |
| "step": 865 |
| }, |
| { |
| "epoch": 1.0446320868516286, |
| "grad_norm": 0.31457425401619904, |
| "learning_rate": 3.62153708668454e-05, |
| "loss": 0.3856, |
| "step": 866 |
| }, |
| { |
| "epoch": 1.04583835946924, |
| "grad_norm": 0.36463813044162835, |
| "learning_rate": 3.6193029490616625e-05, |
| "loss": 0.3528, |
| "step": 867 |
| }, |
| { |
| "epoch": 1.0470446320868516, |
| "grad_norm": 0.3370156737832902, |
| "learning_rate": 3.6170688114387845e-05, |
| "loss": 0.3598, |
| "step": 868 |
| }, |
| { |
| "epoch": 1.0482509047044632, |
| "grad_norm": 0.3761941351812865, |
| "learning_rate": 3.614834673815907e-05, |
| "loss": 0.3501, |
| "step": 869 |
| }, |
| { |
| "epoch": 1.0494571773220749, |
| "grad_norm": 0.3246343590333649, |
| "learning_rate": 3.61260053619303e-05, |
| "loss": 0.3817, |
| "step": 870 |
| }, |
| { |
| "epoch": 1.0506634499396863, |
| "grad_norm": 0.40612621160744156, |
| "learning_rate": 3.610366398570152e-05, |
| "loss": 0.3669, |
| "step": 871 |
| }, |
| { |
| "epoch": 1.051869722557298, |
| "grad_norm": 0.39855243669619894, |
| "learning_rate": 3.608132260947275e-05, |
| "loss": 0.3649, |
| "step": 872 |
| }, |
| { |
| "epoch": 1.0530759951749096, |
| "grad_norm": 0.32860941997340953, |
| "learning_rate": 3.605898123324397e-05, |
| "loss": 0.3615, |
| "step": 873 |
| }, |
| { |
| "epoch": 1.0542822677925212, |
| "grad_norm": 0.4135141126588348, |
| "learning_rate": 3.603663985701519e-05, |
| "loss": 0.3576, |
| "step": 874 |
| }, |
| { |
| "epoch": 1.0554885404101326, |
| "grad_norm": 0.3238167077263733, |
| "learning_rate": 3.601429848078642e-05, |
| "loss": 0.3528, |
| "step": 875 |
| }, |
| { |
| "epoch": 1.0566948130277443, |
| "grad_norm": 0.3660474075113215, |
| "learning_rate": 3.5991957104557645e-05, |
| "loss": 0.3613, |
| "step": 876 |
| }, |
| { |
| "epoch": 1.057901085645356, |
| "grad_norm": 0.33674061414084067, |
| "learning_rate": 3.5969615728328865e-05, |
| "loss": 0.3585, |
| "step": 877 |
| }, |
| { |
| "epoch": 1.0591073582629675, |
| "grad_norm": 0.3246101881612825, |
| "learning_rate": 3.594727435210009e-05, |
| "loss": 0.3692, |
| "step": 878 |
| }, |
| { |
| "epoch": 1.060313630880579, |
| "grad_norm": 0.3411005351843354, |
| "learning_rate": 3.592493297587132e-05, |
| "loss": 0.3615, |
| "step": 879 |
| }, |
| { |
| "epoch": 1.0615199034981906, |
| "grad_norm": 0.3108590519815204, |
| "learning_rate": 3.590259159964254e-05, |
| "loss": 0.3594, |
| "step": 880 |
| }, |
| { |
| "epoch": 1.0627261761158022, |
| "grad_norm": 0.3434174584609721, |
| "learning_rate": 3.588025022341376e-05, |
| "loss": 0.3568, |
| "step": 881 |
| }, |
| { |
| "epoch": 1.0639324487334139, |
| "grad_norm": 0.27495195172330483, |
| "learning_rate": 3.585790884718499e-05, |
| "loss": 0.3621, |
| "step": 882 |
| }, |
| { |
| "epoch": 1.0651387213510253, |
| "grad_norm": 0.3604762512563549, |
| "learning_rate": 3.583556747095621e-05, |
| "loss": 0.3718, |
| "step": 883 |
| }, |
| { |
| "epoch": 1.066344993968637, |
| "grad_norm": 0.3218209404107597, |
| "learning_rate": 3.581322609472744e-05, |
| "loss": 0.3493, |
| "step": 884 |
| }, |
| { |
| "epoch": 1.0675512665862485, |
| "grad_norm": 0.320576472870977, |
| "learning_rate": 3.5790884718498665e-05, |
| "loss": 0.3683, |
| "step": 885 |
| }, |
| { |
| "epoch": 1.06875753920386, |
| "grad_norm": 0.3073606422293175, |
| "learning_rate": 3.5768543342269886e-05, |
| "loss": 0.3788, |
| "step": 886 |
| }, |
| { |
| "epoch": 1.0699638118214716, |
| "grad_norm": 0.36492025212381535, |
| "learning_rate": 3.5746201966041106e-05, |
| "loss": 0.348, |
| "step": 887 |
| }, |
| { |
| "epoch": 1.0711700844390832, |
| "grad_norm": 0.3154824646598146, |
| "learning_rate": 3.5723860589812334e-05, |
| "loss": 0.3863, |
| "step": 888 |
| }, |
| { |
| "epoch": 1.0723763570566949, |
| "grad_norm": 0.38244729654560733, |
| "learning_rate": 3.5701519213583554e-05, |
| "loss": 0.3634, |
| "step": 889 |
| }, |
| { |
| "epoch": 1.0735826296743065, |
| "grad_norm": 0.29567778156519975, |
| "learning_rate": 3.567917783735478e-05, |
| "loss": 0.3703, |
| "step": 890 |
| }, |
| { |
| "epoch": 1.074788902291918, |
| "grad_norm": 0.8554106402979569, |
| "learning_rate": 3.565683646112601e-05, |
| "loss": 0.3794, |
| "step": 891 |
| }, |
| { |
| "epoch": 1.0759951749095296, |
| "grad_norm": 0.3030975645200047, |
| "learning_rate": 3.563449508489723e-05, |
| "loss": 0.3605, |
| "step": 892 |
| }, |
| { |
| "epoch": 1.0772014475271412, |
| "grad_norm": 0.2832483178708839, |
| "learning_rate": 3.561215370866846e-05, |
| "loss": 0.3473, |
| "step": 893 |
| }, |
| { |
| "epoch": 1.0784077201447526, |
| "grad_norm": 0.3279420805673933, |
| "learning_rate": 3.558981233243968e-05, |
| "loss": 0.3616, |
| "step": 894 |
| }, |
| { |
| "epoch": 1.0796139927623642, |
| "grad_norm": 0.3173532398101509, |
| "learning_rate": 3.55674709562109e-05, |
| "loss": 0.3578, |
| "step": 895 |
| }, |
| { |
| "epoch": 1.0808202653799759, |
| "grad_norm": 0.3225132430413757, |
| "learning_rate": 3.5545129579982126e-05, |
| "loss": 0.3717, |
| "step": 896 |
| }, |
| { |
| "epoch": 1.0820265379975875, |
| "grad_norm": 0.30488512630939474, |
| "learning_rate": 3.5522788203753354e-05, |
| "loss": 0.3559, |
| "step": 897 |
| }, |
| { |
| "epoch": 1.083232810615199, |
| "grad_norm": 0.3737259775563492, |
| "learning_rate": 3.5500446827524575e-05, |
| "loss": 0.3659, |
| "step": 898 |
| }, |
| { |
| "epoch": 1.0844390832328106, |
| "grad_norm": 0.29431467422973967, |
| "learning_rate": 3.54781054512958e-05, |
| "loss": 0.3626, |
| "step": 899 |
| }, |
| { |
| "epoch": 1.0856453558504222, |
| "grad_norm": 0.3056774050205318, |
| "learning_rate": 3.545576407506703e-05, |
| "loss": 0.3435, |
| "step": 900 |
| }, |
| { |
| "epoch": 1.0868516284680338, |
| "grad_norm": 0.2800691429045292, |
| "learning_rate": 3.543342269883825e-05, |
| "loss": 0.3554, |
| "step": 901 |
| }, |
| { |
| "epoch": 1.0880579010856453, |
| "grad_norm": 0.33666126556125986, |
| "learning_rate": 3.541108132260947e-05, |
| "loss": 0.3893, |
| "step": 902 |
| }, |
| { |
| "epoch": 1.089264173703257, |
| "grad_norm": 0.31181311277504536, |
| "learning_rate": 3.53887399463807e-05, |
| "loss": 0.3858, |
| "step": 903 |
| }, |
| { |
| "epoch": 1.0904704463208685, |
| "grad_norm": 0.33517625298580916, |
| "learning_rate": 3.536639857015192e-05, |
| "loss": 0.3512, |
| "step": 904 |
| }, |
| { |
| "epoch": 1.0916767189384802, |
| "grad_norm": 0.3426706216867623, |
| "learning_rate": 3.534405719392315e-05, |
| "loss": 0.3789, |
| "step": 905 |
| }, |
| { |
| "epoch": 1.0928829915560916, |
| "grad_norm": 0.3566325293955928, |
| "learning_rate": 3.5321715817694374e-05, |
| "loss": 0.3731, |
| "step": 906 |
| }, |
| { |
| "epoch": 1.0940892641737032, |
| "grad_norm": 0.3237953484166762, |
| "learning_rate": 3.5299374441465595e-05, |
| "loss": 0.3529, |
| "step": 907 |
| }, |
| { |
| "epoch": 1.0952955367913149, |
| "grad_norm": 0.4230160362167563, |
| "learning_rate": 3.527703306523682e-05, |
| "loss": 0.3637, |
| "step": 908 |
| }, |
| { |
| "epoch": 1.0965018094089265, |
| "grad_norm": 0.3287139538164379, |
| "learning_rate": 3.525469168900804e-05, |
| "loss": 0.3636, |
| "step": 909 |
| }, |
| { |
| "epoch": 1.097708082026538, |
| "grad_norm": 0.38658111001916495, |
| "learning_rate": 3.5232350312779264e-05, |
| "loss": 0.3742, |
| "step": 910 |
| }, |
| { |
| "epoch": 1.0989143546441495, |
| "grad_norm": 0.30256158696471586, |
| "learning_rate": 3.521000893655049e-05, |
| "loss": 0.3767, |
| "step": 911 |
| }, |
| { |
| "epoch": 1.1001206272617612, |
| "grad_norm": 0.4143128405160983, |
| "learning_rate": 3.518766756032172e-05, |
| "loss": 0.3865, |
| "step": 912 |
| }, |
| { |
| "epoch": 1.1013268998793728, |
| "grad_norm": 0.3179444203982378, |
| "learning_rate": 3.516532618409294e-05, |
| "loss": 0.3675, |
| "step": 913 |
| }, |
| { |
| "epoch": 1.1025331724969842, |
| "grad_norm": 0.36029708813831, |
| "learning_rate": 3.514298480786417e-05, |
| "loss": 0.3677, |
| "step": 914 |
| }, |
| { |
| "epoch": 1.1037394451145959, |
| "grad_norm": 0.29429182950704297, |
| "learning_rate": 3.5120643431635394e-05, |
| "loss": 0.3487, |
| "step": 915 |
| }, |
| { |
| "epoch": 1.1049457177322075, |
| "grad_norm": 0.37996208960282496, |
| "learning_rate": 3.5098302055406615e-05, |
| "loss": 0.3811, |
| "step": 916 |
| }, |
| { |
| "epoch": 1.1061519903498191, |
| "grad_norm": 0.7625080539920996, |
| "learning_rate": 3.5075960679177836e-05, |
| "loss": 0.4344, |
| "step": 917 |
| }, |
| { |
| "epoch": 1.1073582629674306, |
| "grad_norm": 0.3233237284317067, |
| "learning_rate": 3.505361930294906e-05, |
| "loss": 0.3478, |
| "step": 918 |
| }, |
| { |
| "epoch": 1.1085645355850422, |
| "grad_norm": 0.32634118122886424, |
| "learning_rate": 3.5031277926720284e-05, |
| "loss": 0.3608, |
| "step": 919 |
| }, |
| { |
| "epoch": 1.1097708082026538, |
| "grad_norm": 0.34423353361517317, |
| "learning_rate": 3.500893655049151e-05, |
| "loss": 0.3631, |
| "step": 920 |
| }, |
| { |
| "epoch": 1.1109770808202655, |
| "grad_norm": 0.3562353951470559, |
| "learning_rate": 3.498659517426274e-05, |
| "loss": 0.3673, |
| "step": 921 |
| }, |
| { |
| "epoch": 1.1121833534378769, |
| "grad_norm": 0.3702325265210759, |
| "learning_rate": 3.496425379803396e-05, |
| "loss": 0.3749, |
| "step": 922 |
| }, |
| { |
| "epoch": 1.1133896260554885, |
| "grad_norm": 0.4029117460745025, |
| "learning_rate": 3.494191242180519e-05, |
| "loss": 0.3601, |
| "step": 923 |
| }, |
| { |
| "epoch": 1.1145958986731002, |
| "grad_norm": 0.36246580149698215, |
| "learning_rate": 3.491957104557641e-05, |
| "loss": 0.3629, |
| "step": 924 |
| }, |
| { |
| "epoch": 1.1158021712907118, |
| "grad_norm": 0.38132206897614723, |
| "learning_rate": 3.489722966934763e-05, |
| "loss": 0.3814, |
| "step": 925 |
| }, |
| { |
| "epoch": 1.1170084439083232, |
| "grad_norm": 0.3387624425697889, |
| "learning_rate": 3.4874888293118856e-05, |
| "loss": 0.3629, |
| "step": 926 |
| }, |
| { |
| "epoch": 1.1182147165259348, |
| "grad_norm": 0.3678799590845831, |
| "learning_rate": 3.485254691689008e-05, |
| "loss": 0.3667, |
| "step": 927 |
| }, |
| { |
| "epoch": 1.1194209891435465, |
| "grad_norm": 0.3868471391215961, |
| "learning_rate": 3.4830205540661304e-05, |
| "loss": 0.3531, |
| "step": 928 |
| }, |
| { |
| "epoch": 1.1206272617611581, |
| "grad_norm": 0.35802571950959106, |
| "learning_rate": 3.480786416443253e-05, |
| "loss": 0.3728, |
| "step": 929 |
| }, |
| { |
| "epoch": 1.1218335343787695, |
| "grad_norm": 0.4527352418804462, |
| "learning_rate": 3.478552278820376e-05, |
| "loss": 0.3519, |
| "step": 930 |
| }, |
| { |
| "epoch": 1.1230398069963812, |
| "grad_norm": 0.38538894689912423, |
| "learning_rate": 3.476318141197498e-05, |
| "loss": 0.3654, |
| "step": 931 |
| }, |
| { |
| "epoch": 1.1242460796139928, |
| "grad_norm": 0.32599224994044007, |
| "learning_rate": 3.47408400357462e-05, |
| "loss": 0.3643, |
| "step": 932 |
| }, |
| { |
| "epoch": 1.1254523522316044, |
| "grad_norm": 0.4953992771024001, |
| "learning_rate": 3.471849865951743e-05, |
| "loss": 0.3627, |
| "step": 933 |
| }, |
| { |
| "epoch": 1.1266586248492159, |
| "grad_norm": 0.3186598726280072, |
| "learning_rate": 3.4696157283288655e-05, |
| "loss": 0.3565, |
| "step": 934 |
| }, |
| { |
| "epoch": 1.1278648974668275, |
| "grad_norm": 0.4038468704612751, |
| "learning_rate": 3.4673815907059876e-05, |
| "loss": 0.3623, |
| "step": 935 |
| }, |
| { |
| "epoch": 1.1290711700844391, |
| "grad_norm": 0.3481809289328933, |
| "learning_rate": 3.4651474530831104e-05, |
| "loss": 0.3954, |
| "step": 936 |
| }, |
| { |
| "epoch": 1.1302774427020505, |
| "grad_norm": 0.3683916961882533, |
| "learning_rate": 3.462913315460233e-05, |
| "loss": 0.3349, |
| "step": 937 |
| }, |
| { |
| "epoch": 1.1314837153196622, |
| "grad_norm": 0.32473875080902603, |
| "learning_rate": 3.460679177837355e-05, |
| "loss": 0.3743, |
| "step": 938 |
| }, |
| { |
| "epoch": 1.1326899879372738, |
| "grad_norm": 0.3928948622915465, |
| "learning_rate": 3.458445040214477e-05, |
| "loss": 0.3568, |
| "step": 939 |
| }, |
| { |
| "epoch": 1.1338962605548855, |
| "grad_norm": 0.34316792512475985, |
| "learning_rate": 3.4562109025916e-05, |
| "loss": 0.3595, |
| "step": 940 |
| }, |
| { |
| "epoch": 1.135102533172497, |
| "grad_norm": 0.38074589547250975, |
| "learning_rate": 3.453976764968722e-05, |
| "loss": 0.3717, |
| "step": 941 |
| }, |
| { |
| "epoch": 1.1363088057901085, |
| "grad_norm": 0.3381612708943419, |
| "learning_rate": 3.451742627345845e-05, |
| "loss": 0.3628, |
| "step": 942 |
| }, |
| { |
| "epoch": 1.1375150784077201, |
| "grad_norm": 0.3918788189329609, |
| "learning_rate": 3.4495084897229676e-05, |
| "loss": 0.3402, |
| "step": 943 |
| }, |
| { |
| "epoch": 1.1387213510253318, |
| "grad_norm": 0.3657275116322395, |
| "learning_rate": 3.4472743521000896e-05, |
| "loss": 0.349, |
| "step": 944 |
| }, |
| { |
| "epoch": 1.1399276236429432, |
| "grad_norm": 0.4007355044780056, |
| "learning_rate": 3.4450402144772124e-05, |
| "loss": 0.3688, |
| "step": 945 |
| }, |
| { |
| "epoch": 1.1411338962605548, |
| "grad_norm": 0.4138120642712477, |
| "learning_rate": 3.4428060768543344e-05, |
| "loss": 0.3738, |
| "step": 946 |
| }, |
| { |
| "epoch": 1.1423401688781665, |
| "grad_norm": 0.4289328018683492, |
| "learning_rate": 3.4405719392314565e-05, |
| "loss": 0.3658, |
| "step": 947 |
| }, |
| { |
| "epoch": 1.143546441495778, |
| "grad_norm": 0.34457376242795956, |
| "learning_rate": 3.438337801608579e-05, |
| "loss": 0.3648, |
| "step": 948 |
| }, |
| { |
| "epoch": 1.1447527141133897, |
| "grad_norm": 0.39016387340771286, |
| "learning_rate": 3.436103663985702e-05, |
| "loss": 0.3851, |
| "step": 949 |
| }, |
| { |
| "epoch": 1.1459589867310012, |
| "grad_norm": 0.3794539802442255, |
| "learning_rate": 3.433869526362824e-05, |
| "loss": 0.3703, |
| "step": 950 |
| }, |
| { |
| "epoch": 1.1471652593486128, |
| "grad_norm": 0.3400125800009884, |
| "learning_rate": 3.431635388739947e-05, |
| "loss": 0.3651, |
| "step": 951 |
| }, |
| { |
| "epoch": 1.1483715319662244, |
| "grad_norm": 0.3962308094664021, |
| "learning_rate": 3.429401251117069e-05, |
| "loss": 0.3711, |
| "step": 952 |
| }, |
| { |
| "epoch": 1.1495778045838358, |
| "grad_norm": 0.29475072361506954, |
| "learning_rate": 3.427167113494191e-05, |
| "loss": 0.3649, |
| "step": 953 |
| }, |
| { |
| "epoch": 1.1507840772014475, |
| "grad_norm": 0.3599623229725101, |
| "learning_rate": 3.424932975871314e-05, |
| "loss": 0.3755, |
| "step": 954 |
| }, |
| { |
| "epoch": 1.1519903498190591, |
| "grad_norm": 0.3234762124366387, |
| "learning_rate": 3.4226988382484365e-05, |
| "loss": 0.3419, |
| "step": 955 |
| }, |
| { |
| "epoch": 1.1531966224366708, |
| "grad_norm": 0.36396822545574137, |
| "learning_rate": 3.4204647006255585e-05, |
| "loss": 0.3589, |
| "step": 956 |
| }, |
| { |
| "epoch": 1.1544028950542822, |
| "grad_norm": 0.2965621293534876, |
| "learning_rate": 3.418230563002681e-05, |
| "loss": 0.3408, |
| "step": 957 |
| }, |
| { |
| "epoch": 1.1556091676718938, |
| "grad_norm": 0.40185597700007947, |
| "learning_rate": 3.415996425379804e-05, |
| "loss": 0.3767, |
| "step": 958 |
| }, |
| { |
| "epoch": 1.1568154402895054, |
| "grad_norm": 0.2814563352081147, |
| "learning_rate": 3.413762287756926e-05, |
| "loss": 0.3489, |
| "step": 959 |
| }, |
| { |
| "epoch": 1.158021712907117, |
| "grad_norm": 0.39116431668529056, |
| "learning_rate": 3.411528150134048e-05, |
| "loss": 0.3447, |
| "step": 960 |
| }, |
| { |
| "epoch": 1.1592279855247285, |
| "grad_norm": 0.37682364888520664, |
| "learning_rate": 3.409294012511171e-05, |
| "loss": 0.3699, |
| "step": 961 |
| }, |
| { |
| "epoch": 1.1604342581423401, |
| "grad_norm": 0.38866135079779807, |
| "learning_rate": 3.407059874888293e-05, |
| "loss": 0.379, |
| "step": 962 |
| }, |
| { |
| "epoch": 1.1616405307599518, |
| "grad_norm": 0.43349224212309945, |
| "learning_rate": 3.404825737265416e-05, |
| "loss": 0.3771, |
| "step": 963 |
| }, |
| { |
| "epoch": 1.1628468033775634, |
| "grad_norm": 0.3490798723729157, |
| "learning_rate": 3.4025915996425385e-05, |
| "loss": 0.366, |
| "step": 964 |
| }, |
| { |
| "epoch": 1.1640530759951748, |
| "grad_norm": 0.4291120472338438, |
| "learning_rate": 3.4003574620196606e-05, |
| "loss": 0.3735, |
| "step": 965 |
| }, |
| { |
| "epoch": 1.1652593486127865, |
| "grad_norm": 0.475166138749199, |
| "learning_rate": 3.398123324396783e-05, |
| "loss": 0.3856, |
| "step": 966 |
| }, |
| { |
| "epoch": 1.166465621230398, |
| "grad_norm": 0.44115919424080835, |
| "learning_rate": 3.3958891867739054e-05, |
| "loss": 0.3874, |
| "step": 967 |
| }, |
| { |
| "epoch": 1.1676718938480097, |
| "grad_norm": 0.4265603005387793, |
| "learning_rate": 3.3936550491510274e-05, |
| "loss": 0.3662, |
| "step": 968 |
| }, |
| { |
| "epoch": 1.1688781664656211, |
| "grad_norm": 0.3541937956792882, |
| "learning_rate": 3.39142091152815e-05, |
| "loss": 0.3678, |
| "step": 969 |
| }, |
| { |
| "epoch": 1.1700844390832328, |
| "grad_norm": 0.3983932739056756, |
| "learning_rate": 3.389186773905273e-05, |
| "loss": 0.3782, |
| "step": 970 |
| }, |
| { |
| "epoch": 1.1712907117008444, |
| "grad_norm": 0.2708040120469766, |
| "learning_rate": 3.386952636282395e-05, |
| "loss": 0.3508, |
| "step": 971 |
| }, |
| { |
| "epoch": 1.172496984318456, |
| "grad_norm": 0.38814511090940623, |
| "learning_rate": 3.384718498659518e-05, |
| "loss": 0.3674, |
| "step": 972 |
| }, |
| { |
| "epoch": 1.1737032569360675, |
| "grad_norm": 0.31952981884106635, |
| "learning_rate": 3.3824843610366405e-05, |
| "loss": 0.375, |
| "step": 973 |
| }, |
| { |
| "epoch": 1.174909529553679, |
| "grad_norm": 0.3405372128693083, |
| "learning_rate": 3.3802502234137626e-05, |
| "loss": 0.358, |
| "step": 974 |
| }, |
| { |
| "epoch": 1.1761158021712907, |
| "grad_norm": 0.325059651004217, |
| "learning_rate": 3.3780160857908846e-05, |
| "loss": 0.3643, |
| "step": 975 |
| }, |
| { |
| "epoch": 1.1773220747889024, |
| "grad_norm": 0.3125452451270937, |
| "learning_rate": 3.3757819481680074e-05, |
| "loss": 0.3586, |
| "step": 976 |
| }, |
| { |
| "epoch": 1.1785283474065138, |
| "grad_norm": 0.3352607281639852, |
| "learning_rate": 3.3735478105451295e-05, |
| "loss": 0.3688, |
| "step": 977 |
| }, |
| { |
| "epoch": 1.1797346200241254, |
| "grad_norm": 2.976439201363688, |
| "learning_rate": 3.371313672922252e-05, |
| "loss": 0.3982, |
| "step": 978 |
| }, |
| { |
| "epoch": 1.180940892641737, |
| "grad_norm": 0.4938721359348584, |
| "learning_rate": 3.369079535299375e-05, |
| "loss": 0.3763, |
| "step": 979 |
| }, |
| { |
| "epoch": 1.1821471652593487, |
| "grad_norm": 0.2726220782356802, |
| "learning_rate": 3.366845397676497e-05, |
| "loss": 0.3496, |
| "step": 980 |
| }, |
| { |
| "epoch": 1.1833534378769601, |
| "grad_norm": 0.3666194013950933, |
| "learning_rate": 3.36461126005362e-05, |
| "loss": 0.3567, |
| "step": 981 |
| }, |
| { |
| "epoch": 1.1845597104945718, |
| "grad_norm": 0.3989427724814577, |
| "learning_rate": 3.362377122430742e-05, |
| "loss": 0.3914, |
| "step": 982 |
| }, |
| { |
| "epoch": 1.1857659831121834, |
| "grad_norm": 0.35488518310984535, |
| "learning_rate": 3.360142984807864e-05, |
| "loss": 0.3816, |
| "step": 983 |
| }, |
| { |
| "epoch": 1.186972255729795, |
| "grad_norm": 0.4276280862353782, |
| "learning_rate": 3.3579088471849867e-05, |
| "loss": 0.3814, |
| "step": 984 |
| }, |
| { |
| "epoch": 1.1881785283474064, |
| "grad_norm": 0.3927987855722157, |
| "learning_rate": 3.3556747095621094e-05, |
| "loss": 0.3523, |
| "step": 985 |
| }, |
| { |
| "epoch": 1.189384800965018, |
| "grad_norm": 0.38737768232211683, |
| "learning_rate": 3.3534405719392315e-05, |
| "loss": 0.3823, |
| "step": 986 |
| }, |
| { |
| "epoch": 1.1905910735826297, |
| "grad_norm": 0.38347649772291015, |
| "learning_rate": 3.351206434316354e-05, |
| "loss": 0.3536, |
| "step": 987 |
| }, |
| { |
| "epoch": 1.1917973462002411, |
| "grad_norm": 0.30177462805239674, |
| "learning_rate": 3.348972296693477e-05, |
| "loss": 0.3736, |
| "step": 988 |
| }, |
| { |
| "epoch": 1.1930036188178528, |
| "grad_norm": 0.3551527270864664, |
| "learning_rate": 3.346738159070599e-05, |
| "loss": 0.3507, |
| "step": 989 |
| }, |
| { |
| "epoch": 1.1942098914354644, |
| "grad_norm": 0.3486555270352525, |
| "learning_rate": 3.344504021447721e-05, |
| "loss": 0.3971, |
| "step": 990 |
| }, |
| { |
| "epoch": 1.195416164053076, |
| "grad_norm": 0.3435059469091779, |
| "learning_rate": 3.342269883824844e-05, |
| "loss": 0.3561, |
| "step": 991 |
| }, |
| { |
| "epoch": 1.1966224366706877, |
| "grad_norm": 0.4189817765235362, |
| "learning_rate": 3.340035746201966e-05, |
| "loss": 0.3591, |
| "step": 992 |
| }, |
| { |
| "epoch": 1.197828709288299, |
| "grad_norm": 0.3122862791991749, |
| "learning_rate": 3.337801608579089e-05, |
| "loss": 0.3633, |
| "step": 993 |
| }, |
| { |
| "epoch": 1.1990349819059107, |
| "grad_norm": 0.34538308973829507, |
| "learning_rate": 3.3355674709562114e-05, |
| "loss": 0.3554, |
| "step": 994 |
| }, |
| { |
| "epoch": 1.2002412545235224, |
| "grad_norm": 0.3936966308431531, |
| "learning_rate": 3.3333333333333335e-05, |
| "loss": 0.3685, |
| "step": 995 |
| }, |
| { |
| "epoch": 1.2014475271411338, |
| "grad_norm": 0.2939043985854137, |
| "learning_rate": 3.331099195710456e-05, |
| "loss": 0.3707, |
| "step": 996 |
| }, |
| { |
| "epoch": 1.2026537997587454, |
| "grad_norm": 0.36151070128805723, |
| "learning_rate": 3.328865058087578e-05, |
| "loss": 0.3475, |
| "step": 997 |
| }, |
| { |
| "epoch": 1.203860072376357, |
| "grad_norm": 0.31584191649328586, |
| "learning_rate": 3.3266309204647004e-05, |
| "loss": 0.3614, |
| "step": 998 |
| }, |
| { |
| "epoch": 1.2050663449939687, |
| "grad_norm": 0.3065266896699921, |
| "learning_rate": 3.324396782841823e-05, |
| "loss": 0.3536, |
| "step": 999 |
| }, |
| { |
| "epoch": 1.2062726176115803, |
| "grad_norm": 0.3284469711990536, |
| "learning_rate": 3.322162645218946e-05, |
| "loss": 0.3709, |
| "step": 1000 |
| }, |
| { |
| "epoch": 1.2074788902291917, |
| "grad_norm": 0.28866547050936975, |
| "learning_rate": 3.319928507596068e-05, |
| "loss": 0.3685, |
| "step": 1001 |
| }, |
| { |
| "epoch": 1.2086851628468034, |
| "grad_norm": 0.33796420853850734, |
| "learning_rate": 3.317694369973191e-05, |
| "loss": 0.3652, |
| "step": 1002 |
| }, |
| { |
| "epoch": 1.209891435464415, |
| "grad_norm": 0.28342229294291926, |
| "learning_rate": 3.3154602323503134e-05, |
| "loss": 0.3337, |
| "step": 1003 |
| }, |
| { |
| "epoch": 1.2110977080820264, |
| "grad_norm": 0.333402960456291, |
| "learning_rate": 3.313226094727435e-05, |
| "loss": 0.3584, |
| "step": 1004 |
| }, |
| { |
| "epoch": 1.212303980699638, |
| "grad_norm": 0.3342504865352302, |
| "learning_rate": 3.3109919571045576e-05, |
| "loss": 0.3675, |
| "step": 1005 |
| }, |
| { |
| "epoch": 1.2135102533172497, |
| "grad_norm": 0.2913332665016377, |
| "learning_rate": 3.30875781948168e-05, |
| "loss": 0.3561, |
| "step": 1006 |
| }, |
| { |
| "epoch": 1.2147165259348613, |
| "grad_norm": 0.31253774645325566, |
| "learning_rate": 3.3065236818588024e-05, |
| "loss": 0.3625, |
| "step": 1007 |
| }, |
| { |
| "epoch": 1.215922798552473, |
| "grad_norm": 0.337334942984555, |
| "learning_rate": 3.304289544235925e-05, |
| "loss": 0.373, |
| "step": 1008 |
| }, |
| { |
| "epoch": 1.2171290711700844, |
| "grad_norm": 0.31936744219352725, |
| "learning_rate": 3.302055406613048e-05, |
| "loss": 0.3608, |
| "step": 1009 |
| }, |
| { |
| "epoch": 1.218335343787696, |
| "grad_norm": 0.36610894353405193, |
| "learning_rate": 3.29982126899017e-05, |
| "loss": 0.3673, |
| "step": 1010 |
| }, |
| { |
| "epoch": 1.2195416164053077, |
| "grad_norm": 0.3398885145604998, |
| "learning_rate": 3.297587131367292e-05, |
| "loss": 0.3524, |
| "step": 1011 |
| }, |
| { |
| "epoch": 1.220747889022919, |
| "grad_norm": 0.40102720999756397, |
| "learning_rate": 3.295352993744415e-05, |
| "loss": 0.3744, |
| "step": 1012 |
| }, |
| { |
| "epoch": 1.2219541616405307, |
| "grad_norm": 0.40168746016689283, |
| "learning_rate": 3.293118856121537e-05, |
| "loss": 0.369, |
| "step": 1013 |
| }, |
| { |
| "epoch": 1.2231604342581424, |
| "grad_norm": 0.3300948359331406, |
| "learning_rate": 3.2908847184986596e-05, |
| "loss": 0.3724, |
| "step": 1014 |
| }, |
| { |
| "epoch": 1.224366706875754, |
| "grad_norm": 0.38963810919167646, |
| "learning_rate": 3.2886505808757823e-05, |
| "loss": 0.3652, |
| "step": 1015 |
| }, |
| { |
| "epoch": 1.2255729794933654, |
| "grad_norm": 0.32140355256835956, |
| "learning_rate": 3.2864164432529044e-05, |
| "loss": 0.3715, |
| "step": 1016 |
| }, |
| { |
| "epoch": 1.226779252110977, |
| "grad_norm": 0.3483272281013881, |
| "learning_rate": 3.284182305630027e-05, |
| "loss": 0.3562, |
| "step": 1017 |
| }, |
| { |
| "epoch": 1.2279855247285887, |
| "grad_norm": 0.3309692180489672, |
| "learning_rate": 3.281948168007149e-05, |
| "loss": 0.3649, |
| "step": 1018 |
| }, |
| { |
| "epoch": 1.2291917973462003, |
| "grad_norm": 0.4077279870823795, |
| "learning_rate": 3.279714030384271e-05, |
| "loss": 0.3825, |
| "step": 1019 |
| }, |
| { |
| "epoch": 1.2303980699638117, |
| "grad_norm": 0.3150547142275848, |
| "learning_rate": 3.277479892761394e-05, |
| "loss": 0.3772, |
| "step": 1020 |
| }, |
| { |
| "epoch": 1.2316043425814234, |
| "grad_norm": 0.3798958280863762, |
| "learning_rate": 3.275245755138517e-05, |
| "loss": 0.3556, |
| "step": 1021 |
| }, |
| { |
| "epoch": 1.232810615199035, |
| "grad_norm": 0.31873298226111396, |
| "learning_rate": 3.273011617515639e-05, |
| "loss": 0.3635, |
| "step": 1022 |
| }, |
| { |
| "epoch": 1.2340168878166466, |
| "grad_norm": 0.336183080052793, |
| "learning_rate": 3.2707774798927616e-05, |
| "loss": 0.3714, |
| "step": 1023 |
| }, |
| { |
| "epoch": 1.235223160434258, |
| "grad_norm": 0.3799076103107832, |
| "learning_rate": 3.2685433422698844e-05, |
| "loss": 0.3736, |
| "step": 1024 |
| }, |
| { |
| "epoch": 1.2364294330518697, |
| "grad_norm": 0.28728541195117857, |
| "learning_rate": 3.2663092046470064e-05, |
| "loss": 0.3755, |
| "step": 1025 |
| }, |
| { |
| "epoch": 1.2376357056694813, |
| "grad_norm": 0.3232968103048298, |
| "learning_rate": 3.2640750670241285e-05, |
| "loss": 0.3717, |
| "step": 1026 |
| }, |
| { |
| "epoch": 1.238841978287093, |
| "grad_norm": 0.3301793860111746, |
| "learning_rate": 3.261840929401251e-05, |
| "loss": 0.3792, |
| "step": 1027 |
| }, |
| { |
| "epoch": 1.2400482509047044, |
| "grad_norm": 0.3143951692046986, |
| "learning_rate": 3.259606791778373e-05, |
| "loss": 0.3763, |
| "step": 1028 |
| }, |
| { |
| "epoch": 1.241254523522316, |
| "grad_norm": 0.32851664710301687, |
| "learning_rate": 3.257372654155496e-05, |
| "loss": 0.3652, |
| "step": 1029 |
| }, |
| { |
| "epoch": 1.2424607961399277, |
| "grad_norm": 0.3911048328835441, |
| "learning_rate": 3.255138516532619e-05, |
| "loss": 0.3616, |
| "step": 1030 |
| }, |
| { |
| "epoch": 1.2436670687575393, |
| "grad_norm": 0.27935714055304606, |
| "learning_rate": 3.252904378909741e-05, |
| "loss": 0.3596, |
| "step": 1031 |
| }, |
| { |
| "epoch": 1.2448733413751507, |
| "grad_norm": 0.3346861939949344, |
| "learning_rate": 3.2506702412868636e-05, |
| "loss": 0.3547, |
| "step": 1032 |
| }, |
| { |
| "epoch": 1.2460796139927623, |
| "grad_norm": 0.28091787367278925, |
| "learning_rate": 3.248436103663986e-05, |
| "loss": 0.3445, |
| "step": 1033 |
| }, |
| { |
| "epoch": 1.247285886610374, |
| "grad_norm": 0.29369101218699734, |
| "learning_rate": 3.246201966041108e-05, |
| "loss": 0.3597, |
| "step": 1034 |
| }, |
| { |
| "epoch": 1.2484921592279856, |
| "grad_norm": 0.29878313480140756, |
| "learning_rate": 3.2439678284182305e-05, |
| "loss": 0.3697, |
| "step": 1035 |
| }, |
| { |
| "epoch": 1.249698431845597, |
| "grad_norm": 0.32949020606504464, |
| "learning_rate": 3.241733690795353e-05, |
| "loss": 0.3685, |
| "step": 1036 |
| }, |
| { |
| "epoch": 1.2509047044632087, |
| "grad_norm": 0.2926475487110276, |
| "learning_rate": 3.2394995531724753e-05, |
| "loss": 0.3626, |
| "step": 1037 |
| }, |
| { |
| "epoch": 1.2521109770808203, |
| "grad_norm": 0.2859720388486372, |
| "learning_rate": 3.237265415549598e-05, |
| "loss": 0.3743, |
| "step": 1038 |
| }, |
| { |
| "epoch": 1.2533172496984317, |
| "grad_norm": 0.3402775775703186, |
| "learning_rate": 3.235031277926721e-05, |
| "loss": 0.3442, |
| "step": 1039 |
| }, |
| { |
| "epoch": 1.2545235223160434, |
| "grad_norm": 0.254391649837801, |
| "learning_rate": 3.232797140303843e-05, |
| "loss": 0.3594, |
| "step": 1040 |
| }, |
| { |
| "epoch": 1.255729794933655, |
| "grad_norm": 0.39919916051774196, |
| "learning_rate": 3.230563002680965e-05, |
| "loss": 0.3839, |
| "step": 1041 |
| }, |
| { |
| "epoch": 1.2569360675512666, |
| "grad_norm": 0.27024575975320314, |
| "learning_rate": 3.228328865058088e-05, |
| "loss": 0.3533, |
| "step": 1042 |
| }, |
| { |
| "epoch": 1.2581423401688783, |
| "grad_norm": 0.319622346903351, |
| "learning_rate": 3.22609472743521e-05, |
| "loss": 0.3794, |
| "step": 1043 |
| }, |
| { |
| "epoch": 1.2593486127864897, |
| "grad_norm": 0.389068060223662, |
| "learning_rate": 3.2238605898123325e-05, |
| "loss": 0.382, |
| "step": 1044 |
| }, |
| { |
| "epoch": 1.2605548854041013, |
| "grad_norm": 0.3268225256048267, |
| "learning_rate": 3.221626452189455e-05, |
| "loss": 0.3603, |
| "step": 1045 |
| }, |
| { |
| "epoch": 1.261761158021713, |
| "grad_norm": 0.3633092550933109, |
| "learning_rate": 3.2193923145665774e-05, |
| "loss": 0.3858, |
| "step": 1046 |
| }, |
| { |
| "epoch": 1.2629674306393244, |
| "grad_norm": 0.32431387176898796, |
| "learning_rate": 3.2171581769437e-05, |
| "loss": 0.3809, |
| "step": 1047 |
| }, |
| { |
| "epoch": 1.264173703256936, |
| "grad_norm": 0.3003671692231628, |
| "learning_rate": 3.214924039320822e-05, |
| "loss": 0.3654, |
| "step": 1048 |
| }, |
| { |
| "epoch": 1.2653799758745476, |
| "grad_norm": 0.3133031222068096, |
| "learning_rate": 3.212689901697944e-05, |
| "loss": 0.3542, |
| "step": 1049 |
| }, |
| { |
| "epoch": 1.2665862484921593, |
| "grad_norm": 0.27077930029731606, |
| "learning_rate": 3.210455764075067e-05, |
| "loss": 0.3732, |
| "step": 1050 |
| }, |
| { |
| "epoch": 1.267792521109771, |
| "grad_norm": 0.3054937382145299, |
| "learning_rate": 3.20822162645219e-05, |
| "loss": 0.3524, |
| "step": 1051 |
| }, |
| { |
| "epoch": 1.2689987937273823, |
| "grad_norm": 0.3178405295280176, |
| "learning_rate": 3.205987488829312e-05, |
| "loss": 0.3629, |
| "step": 1052 |
| }, |
| { |
| "epoch": 1.270205066344994, |
| "grad_norm": 0.31708936628519185, |
| "learning_rate": 3.2037533512064346e-05, |
| "loss": 0.381, |
| "step": 1053 |
| }, |
| { |
| "epoch": 1.2714113389626056, |
| "grad_norm": 0.24279020858927816, |
| "learning_rate": 3.201519213583557e-05, |
| "loss": 0.3411, |
| "step": 1054 |
| }, |
| { |
| "epoch": 1.272617611580217, |
| "grad_norm": 0.28116483364277983, |
| "learning_rate": 3.1992850759606794e-05, |
| "loss": 0.3676, |
| "step": 1055 |
| }, |
| { |
| "epoch": 1.2738238841978287, |
| "grad_norm": 0.3183015486291266, |
| "learning_rate": 3.1970509383378014e-05, |
| "loss": 0.3887, |
| "step": 1056 |
| }, |
| { |
| "epoch": 1.2750301568154403, |
| "grad_norm": 0.27640816805976137, |
| "learning_rate": 3.194816800714924e-05, |
| "loss": 0.3612, |
| "step": 1057 |
| }, |
| { |
| "epoch": 1.276236429433052, |
| "grad_norm": 0.2708010845965508, |
| "learning_rate": 3.192582663092047e-05, |
| "loss": 0.3549, |
| "step": 1058 |
| }, |
| { |
| "epoch": 1.2774427020506636, |
| "grad_norm": 0.3198350134586524, |
| "learning_rate": 3.190348525469169e-05, |
| "loss": 0.3726, |
| "step": 1059 |
| }, |
| { |
| "epoch": 1.278648974668275, |
| "grad_norm": 0.30699976358969727, |
| "learning_rate": 3.188114387846292e-05, |
| "loss": 0.3715, |
| "step": 1060 |
| }, |
| { |
| "epoch": 1.2798552472858866, |
| "grad_norm": 0.277809943346299, |
| "learning_rate": 3.1858802502234145e-05, |
| "loss": 0.3392, |
| "step": 1061 |
| }, |
| { |
| "epoch": 1.2810615199034983, |
| "grad_norm": 0.31996659071085015, |
| "learning_rate": 3.1836461126005366e-05, |
| "loss": 0.3803, |
| "step": 1062 |
| }, |
| { |
| "epoch": 1.2822677925211097, |
| "grad_norm": 0.33637724087127435, |
| "learning_rate": 3.1814119749776586e-05, |
| "loss": 0.3739, |
| "step": 1063 |
| }, |
| { |
| "epoch": 1.2834740651387213, |
| "grad_norm": 0.36483447767464733, |
| "learning_rate": 3.1791778373547814e-05, |
| "loss": 0.3475, |
| "step": 1064 |
| }, |
| { |
| "epoch": 1.284680337756333, |
| "grad_norm": 0.33797545906118115, |
| "learning_rate": 3.1769436997319035e-05, |
| "loss": 0.3559, |
| "step": 1065 |
| }, |
| { |
| "epoch": 1.2858866103739446, |
| "grad_norm": 0.3992655714858619, |
| "learning_rate": 3.174709562109026e-05, |
| "loss": 0.3496, |
| "step": 1066 |
| }, |
| { |
| "epoch": 1.2870928829915562, |
| "grad_norm": 0.3757105460842093, |
| "learning_rate": 3.172475424486149e-05, |
| "loss": 0.3574, |
| "step": 1067 |
| }, |
| { |
| "epoch": 1.2882991556091676, |
| "grad_norm": 0.3838514715771389, |
| "learning_rate": 3.170241286863271e-05, |
| "loss": 0.3714, |
| "step": 1068 |
| }, |
| { |
| "epoch": 1.2895054282267793, |
| "grad_norm": 0.3558726533103807, |
| "learning_rate": 3.168007149240393e-05, |
| "loss": 0.3607, |
| "step": 1069 |
| }, |
| { |
| "epoch": 1.290711700844391, |
| "grad_norm": 0.34928305006041077, |
| "learning_rate": 3.165773011617516e-05, |
| "loss": 0.3688, |
| "step": 1070 |
| }, |
| { |
| "epoch": 1.2919179734620023, |
| "grad_norm": 0.39750595875770633, |
| "learning_rate": 3.163538873994638e-05, |
| "loss": 0.3585, |
| "step": 1071 |
| }, |
| { |
| "epoch": 1.293124246079614, |
| "grad_norm": 0.31647638371635867, |
| "learning_rate": 3.161304736371761e-05, |
| "loss": 0.3327, |
| "step": 1072 |
| }, |
| { |
| "epoch": 1.2943305186972256, |
| "grad_norm": 0.370193912732274, |
| "learning_rate": 3.1590705987488834e-05, |
| "loss": 0.3827, |
| "step": 1073 |
| }, |
| { |
| "epoch": 1.2955367913148372, |
| "grad_norm": 0.362268565167923, |
| "learning_rate": 3.1568364611260055e-05, |
| "loss": 0.3817, |
| "step": 1074 |
| }, |
| { |
| "epoch": 1.2967430639324489, |
| "grad_norm": 0.3333040592999143, |
| "learning_rate": 3.154602323503128e-05, |
| "loss": 0.3859, |
| "step": 1075 |
| }, |
| { |
| "epoch": 1.2979493365500603, |
| "grad_norm": 0.37198883984977626, |
| "learning_rate": 3.15236818588025e-05, |
| "loss": 0.362, |
| "step": 1076 |
| }, |
| { |
| "epoch": 1.299155609167672, |
| "grad_norm": 0.32479600140192555, |
| "learning_rate": 3.1501340482573724e-05, |
| "loss": 0.361, |
| "step": 1077 |
| }, |
| { |
| "epoch": 1.3003618817852836, |
| "grad_norm": 0.3041287501118341, |
| "learning_rate": 3.147899910634495e-05, |
| "loss": 0.3551, |
| "step": 1078 |
| }, |
| { |
| "epoch": 1.301568154402895, |
| "grad_norm": 0.3508350111820672, |
| "learning_rate": 3.145665773011618e-05, |
| "loss": 0.3467, |
| "step": 1079 |
| }, |
| { |
| "epoch": 1.3027744270205066, |
| "grad_norm": 0.31289719028187885, |
| "learning_rate": 3.14343163538874e-05, |
| "loss": 0.3464, |
| "step": 1080 |
| }, |
| { |
| "epoch": 1.3039806996381182, |
| "grad_norm": 0.3525515286475814, |
| "learning_rate": 3.141197497765863e-05, |
| "loss": 0.3706, |
| "step": 1081 |
| }, |
| { |
| "epoch": 1.3051869722557297, |
| "grad_norm": 0.36739799881084373, |
| "learning_rate": 3.1389633601429854e-05, |
| "loss": 0.3826, |
| "step": 1082 |
| }, |
| { |
| "epoch": 1.3063932448733413, |
| "grad_norm": 0.3551639488288884, |
| "learning_rate": 3.1367292225201075e-05, |
| "loss": 0.3679, |
| "step": 1083 |
| }, |
| { |
| "epoch": 1.307599517490953, |
| "grad_norm": 0.5166672992275477, |
| "learning_rate": 3.1344950848972296e-05, |
| "loss": 0.3702, |
| "step": 1084 |
| }, |
| { |
| "epoch": 1.3088057901085646, |
| "grad_norm": 0.28002473093112323, |
| "learning_rate": 3.132260947274352e-05, |
| "loss": 0.365, |
| "step": 1085 |
| }, |
| { |
| "epoch": 1.3100120627261762, |
| "grad_norm": 0.28704719612423435, |
| "learning_rate": 3.1300268096514744e-05, |
| "loss": 0.3657, |
| "step": 1086 |
| }, |
| { |
| "epoch": 1.3112183353437876, |
| "grad_norm": 0.2885270894752884, |
| "learning_rate": 3.127792672028597e-05, |
| "loss": 0.3497, |
| "step": 1087 |
| }, |
| { |
| "epoch": 1.3124246079613993, |
| "grad_norm": 0.28563636025816486, |
| "learning_rate": 3.12555853440572e-05, |
| "loss": 0.3653, |
| "step": 1088 |
| }, |
| { |
| "epoch": 1.313630880579011, |
| "grad_norm": 0.3654990574163292, |
| "learning_rate": 3.123324396782842e-05, |
| "loss": 0.3598, |
| "step": 1089 |
| }, |
| { |
| "epoch": 1.3148371531966223, |
| "grad_norm": 0.31153048181894816, |
| "learning_rate": 3.121090259159965e-05, |
| "loss": 0.3688, |
| "step": 1090 |
| }, |
| { |
| "epoch": 1.316043425814234, |
| "grad_norm": 0.30372482261187445, |
| "learning_rate": 3.118856121537087e-05, |
| "loss": 0.372, |
| "step": 1091 |
| }, |
| { |
| "epoch": 1.3172496984318456, |
| "grad_norm": 0.3928628759286902, |
| "learning_rate": 3.116621983914209e-05, |
| "loss": 0.366, |
| "step": 1092 |
| }, |
| { |
| "epoch": 1.3184559710494572, |
| "grad_norm": 0.28051320063916957, |
| "learning_rate": 3.1143878462913316e-05, |
| "loss": 0.372, |
| "step": 1093 |
| }, |
| { |
| "epoch": 1.3196622436670689, |
| "grad_norm": 0.353437964831599, |
| "learning_rate": 3.1121537086684543e-05, |
| "loss": 0.3784, |
| "step": 1094 |
| }, |
| { |
| "epoch": 1.3208685162846803, |
| "grad_norm": 0.36270292248092095, |
| "learning_rate": 3.1099195710455764e-05, |
| "loss": 0.3784, |
| "step": 1095 |
| }, |
| { |
| "epoch": 1.322074788902292, |
| "grad_norm": 0.3100183493099864, |
| "learning_rate": 3.107685433422699e-05, |
| "loss": 0.366, |
| "step": 1096 |
| }, |
| { |
| "epoch": 1.3232810615199035, |
| "grad_norm": 0.33662286705267247, |
| "learning_rate": 3.105451295799822e-05, |
| "loss": 0.3688, |
| "step": 1097 |
| }, |
| { |
| "epoch": 1.324487334137515, |
| "grad_norm": 0.27058145874774586, |
| "learning_rate": 3.103217158176944e-05, |
| "loss": 0.3558, |
| "step": 1098 |
| }, |
| { |
| "epoch": 1.3256936067551266, |
| "grad_norm": 0.3814233812729274, |
| "learning_rate": 3.100983020554066e-05, |
| "loss": 0.3543, |
| "step": 1099 |
| }, |
| { |
| "epoch": 1.3268998793727382, |
| "grad_norm": 0.3440571607399901, |
| "learning_rate": 3.098748882931189e-05, |
| "loss": 0.3524, |
| "step": 1100 |
| }, |
| { |
| "epoch": 1.3281061519903499, |
| "grad_norm": 0.42548361963668074, |
| "learning_rate": 3.096514745308311e-05, |
| "loss": 0.3677, |
| "step": 1101 |
| }, |
| { |
| "epoch": 1.3293124246079615, |
| "grad_norm": 0.31583199094929404, |
| "learning_rate": 3.0942806076854336e-05, |
| "loss": 0.3821, |
| "step": 1102 |
| }, |
| { |
| "epoch": 1.330518697225573, |
| "grad_norm": 0.4079717432016741, |
| "learning_rate": 3.0920464700625564e-05, |
| "loss": 0.3686, |
| "step": 1103 |
| }, |
| { |
| "epoch": 1.3317249698431846, |
| "grad_norm": 0.3311395530178617, |
| "learning_rate": 3.0898123324396784e-05, |
| "loss": 0.3687, |
| "step": 1104 |
| }, |
| { |
| "epoch": 1.3329312424607962, |
| "grad_norm": 0.30119192202192563, |
| "learning_rate": 3.087578194816801e-05, |
| "loss": 0.3695, |
| "step": 1105 |
| }, |
| { |
| "epoch": 1.3341375150784076, |
| "grad_norm": 0.3024724334141376, |
| "learning_rate": 3.085344057193923e-05, |
| "loss": 0.3531, |
| "step": 1106 |
| }, |
| { |
| "epoch": 1.3353437876960192, |
| "grad_norm": 0.3020172197925826, |
| "learning_rate": 3.083109919571045e-05, |
| "loss": 0.3788, |
| "step": 1107 |
| }, |
| { |
| "epoch": 1.3365500603136309, |
| "grad_norm": 0.2823663382158882, |
| "learning_rate": 3.080875781948168e-05, |
| "loss": 0.3401, |
| "step": 1108 |
| }, |
| { |
| "epoch": 1.3377563329312425, |
| "grad_norm": 0.302427062615936, |
| "learning_rate": 3.078641644325291e-05, |
| "loss": 0.3642, |
| "step": 1109 |
| }, |
| { |
| "epoch": 1.3389626055488542, |
| "grad_norm": 0.31551900898503815, |
| "learning_rate": 3.076407506702413e-05, |
| "loss": 0.3555, |
| "step": 1110 |
| }, |
| { |
| "epoch": 1.3401688781664656, |
| "grad_norm": 0.3312984136735708, |
| "learning_rate": 3.0741733690795356e-05, |
| "loss": 0.3673, |
| "step": 1111 |
| }, |
| { |
| "epoch": 1.3413751507840772, |
| "grad_norm": 0.31739717512214, |
| "learning_rate": 3.0719392314566584e-05, |
| "loss": 0.357, |
| "step": 1112 |
| }, |
| { |
| "epoch": 1.3425814234016888, |
| "grad_norm": 0.2727046695858616, |
| "learning_rate": 3.0697050938337804e-05, |
| "loss": 0.3634, |
| "step": 1113 |
| }, |
| { |
| "epoch": 1.3437876960193003, |
| "grad_norm": 0.3776160250845144, |
| "learning_rate": 3.0674709562109025e-05, |
| "loss": 0.3812, |
| "step": 1114 |
| }, |
| { |
| "epoch": 1.344993968636912, |
| "grad_norm": 0.331153836222907, |
| "learning_rate": 3.065236818588025e-05, |
| "loss": 0.3706, |
| "step": 1115 |
| }, |
| { |
| "epoch": 1.3462002412545235, |
| "grad_norm": 0.3135798994454436, |
| "learning_rate": 3.063002680965147e-05, |
| "loss": 0.3695, |
| "step": 1116 |
| }, |
| { |
| "epoch": 1.3474065138721352, |
| "grad_norm": 0.3398371342476646, |
| "learning_rate": 3.06076854334227e-05, |
| "loss": 0.3648, |
| "step": 1117 |
| }, |
| { |
| "epoch": 1.3486127864897468, |
| "grad_norm": 0.3355446307451407, |
| "learning_rate": 3.058534405719393e-05, |
| "loss": 0.3689, |
| "step": 1118 |
| }, |
| { |
| "epoch": 1.3498190591073582, |
| "grad_norm": 0.32871921032944523, |
| "learning_rate": 3.056300268096515e-05, |
| "loss": 0.3823, |
| "step": 1119 |
| }, |
| { |
| "epoch": 1.3510253317249699, |
| "grad_norm": 0.3324992371527075, |
| "learning_rate": 3.0540661304736376e-05, |
| "loss": 0.3533, |
| "step": 1120 |
| }, |
| { |
| "epoch": 1.3522316043425815, |
| "grad_norm": 0.3898432118600308, |
| "learning_rate": 3.05183199285076e-05, |
| "loss": 0.3482, |
| "step": 1121 |
| }, |
| { |
| "epoch": 1.353437876960193, |
| "grad_norm": 0.27000069674814814, |
| "learning_rate": 3.049597855227882e-05, |
| "loss": 0.367, |
| "step": 1122 |
| }, |
| { |
| "epoch": 1.3546441495778045, |
| "grad_norm": 0.3560131101054812, |
| "learning_rate": 3.0473637176050045e-05, |
| "loss": 0.3725, |
| "step": 1123 |
| }, |
| { |
| "epoch": 1.3558504221954162, |
| "grad_norm": 0.3466814385369649, |
| "learning_rate": 3.0451295799821273e-05, |
| "loss": 0.3614, |
| "step": 1124 |
| }, |
| { |
| "epoch": 1.3570566948130278, |
| "grad_norm": 0.3147914304038281, |
| "learning_rate": 3.0428954423592494e-05, |
| "loss": 0.3672, |
| "step": 1125 |
| }, |
| { |
| "epoch": 1.3582629674306395, |
| "grad_norm": 0.3187982701455795, |
| "learning_rate": 3.0406613047363718e-05, |
| "loss": 0.3545, |
| "step": 1126 |
| }, |
| { |
| "epoch": 1.3594692400482509, |
| "grad_norm": 0.2971609813242021, |
| "learning_rate": 3.0384271671134945e-05, |
| "loss": 0.3646, |
| "step": 1127 |
| }, |
| { |
| "epoch": 1.3606755126658625, |
| "grad_norm": 0.28948184614925276, |
| "learning_rate": 3.0361930294906166e-05, |
| "loss": 0.3724, |
| "step": 1128 |
| }, |
| { |
| "epoch": 1.3618817852834741, |
| "grad_norm": 0.26100840084723725, |
| "learning_rate": 3.0339588918677393e-05, |
| "loss": 0.3589, |
| "step": 1129 |
| }, |
| { |
| "epoch": 1.3630880579010856, |
| "grad_norm": 0.26791285850652824, |
| "learning_rate": 3.0317247542448617e-05, |
| "loss": 0.3601, |
| "step": 1130 |
| }, |
| { |
| "epoch": 1.3642943305186972, |
| "grad_norm": 0.29962236845210755, |
| "learning_rate": 3.0294906166219838e-05, |
| "loss": 0.3475, |
| "step": 1131 |
| }, |
| { |
| "epoch": 1.3655006031363088, |
| "grad_norm": 0.3346919570043122, |
| "learning_rate": 3.0272564789991066e-05, |
| "loss": 0.3578, |
| "step": 1132 |
| }, |
| { |
| "epoch": 1.3667068757539202, |
| "grad_norm": 0.26181644687495764, |
| "learning_rate": 3.025022341376229e-05, |
| "loss": 0.3618, |
| "step": 1133 |
| }, |
| { |
| "epoch": 1.3679131483715319, |
| "grad_norm": 0.2724118620499691, |
| "learning_rate": 3.022788203753351e-05, |
| "loss": 0.3685, |
| "step": 1134 |
| }, |
| { |
| "epoch": 1.3691194209891435, |
| "grad_norm": 0.291344635313524, |
| "learning_rate": 3.0205540661304738e-05, |
| "loss": 0.3563, |
| "step": 1135 |
| }, |
| { |
| "epoch": 1.3703256936067552, |
| "grad_norm": 0.3091838446015483, |
| "learning_rate": 3.0183199285075965e-05, |
| "loss": 0.3297, |
| "step": 1136 |
| }, |
| { |
| "epoch": 1.3715319662243668, |
| "grad_norm": 0.2791543209149448, |
| "learning_rate": 3.0160857908847186e-05, |
| "loss": 0.3691, |
| "step": 1137 |
| }, |
| { |
| "epoch": 1.3727382388419782, |
| "grad_norm": 0.30051706948641876, |
| "learning_rate": 3.013851653261841e-05, |
| "loss": 0.3635, |
| "step": 1138 |
| }, |
| { |
| "epoch": 1.3739445114595898, |
| "grad_norm": 0.34203068886863713, |
| "learning_rate": 3.0116175156389638e-05, |
| "loss": 0.3786, |
| "step": 1139 |
| }, |
| { |
| "epoch": 1.3751507840772015, |
| "grad_norm": 0.2751714894785982, |
| "learning_rate": 3.0093833780160858e-05, |
| "loss": 0.3525, |
| "step": 1140 |
| }, |
| { |
| "epoch": 1.376357056694813, |
| "grad_norm": 0.3541112573734306, |
| "learning_rate": 3.0071492403932082e-05, |
| "loss": 0.3584, |
| "step": 1141 |
| }, |
| { |
| "epoch": 1.3775633293124245, |
| "grad_norm": 0.3310525285267929, |
| "learning_rate": 3.004915102770331e-05, |
| "loss": 0.3771, |
| "step": 1142 |
| }, |
| { |
| "epoch": 1.3787696019300362, |
| "grad_norm": 0.2870345778600548, |
| "learning_rate": 3.002680965147453e-05, |
| "loss": 0.3504, |
| "step": 1143 |
| }, |
| { |
| "epoch": 1.3799758745476478, |
| "grad_norm": 0.3248559870711437, |
| "learning_rate": 3.0004468275245755e-05, |
| "loss": 0.3377, |
| "step": 1144 |
| }, |
| { |
| "epoch": 1.3811821471652594, |
| "grad_norm": 0.36564049706754104, |
| "learning_rate": 2.9982126899016982e-05, |
| "loss": 0.3572, |
| "step": 1145 |
| }, |
| { |
| "epoch": 1.3823884197828709, |
| "grad_norm": 0.3125346957664997, |
| "learning_rate": 2.9959785522788203e-05, |
| "loss": 0.366, |
| "step": 1146 |
| }, |
| { |
| "epoch": 1.3835946924004825, |
| "grad_norm": 0.3716766984185386, |
| "learning_rate": 2.993744414655943e-05, |
| "loss": 0.3692, |
| "step": 1147 |
| }, |
| { |
| "epoch": 1.3848009650180941, |
| "grad_norm": 0.40406522080836904, |
| "learning_rate": 2.9915102770330654e-05, |
| "loss": 0.3847, |
| "step": 1148 |
| }, |
| { |
| "epoch": 1.3860072376357055, |
| "grad_norm": 0.3553874252899629, |
| "learning_rate": 2.9892761394101875e-05, |
| "loss": 0.3792, |
| "step": 1149 |
| }, |
| { |
| "epoch": 1.3872135102533172, |
| "grad_norm": 0.3654246145476641, |
| "learning_rate": 2.9870420017873103e-05, |
| "loss": 0.3523, |
| "step": 1150 |
| }, |
| { |
| "epoch": 1.3884197828709288, |
| "grad_norm": 0.372654778385698, |
| "learning_rate": 2.9848078641644327e-05, |
| "loss": 0.363, |
| "step": 1151 |
| }, |
| { |
| "epoch": 1.3896260554885405, |
| "grad_norm": 0.318400474242105, |
| "learning_rate": 2.9825737265415547e-05, |
| "loss": 0.3665, |
| "step": 1152 |
| }, |
| { |
| "epoch": 1.390832328106152, |
| "grad_norm": 0.4476226138153656, |
| "learning_rate": 2.9803395889186775e-05, |
| "loss": 0.3586, |
| "step": 1153 |
| }, |
| { |
| "epoch": 1.3920386007237635, |
| "grad_norm": 0.36090984552024646, |
| "learning_rate": 2.9781054512958002e-05, |
| "loss": 0.3878, |
| "step": 1154 |
| }, |
| { |
| "epoch": 1.3932448733413751, |
| "grad_norm": 0.34995570790330377, |
| "learning_rate": 2.9758713136729223e-05, |
| "loss": 0.3739, |
| "step": 1155 |
| }, |
| { |
| "epoch": 1.3944511459589868, |
| "grad_norm": 0.4036557498361607, |
| "learning_rate": 2.9736371760500447e-05, |
| "loss": 0.3656, |
| "step": 1156 |
| }, |
| { |
| "epoch": 1.3956574185765982, |
| "grad_norm": 0.25983094278157565, |
| "learning_rate": 2.9714030384271675e-05, |
| "loss": 0.3651, |
| "step": 1157 |
| }, |
| { |
| "epoch": 1.3968636911942098, |
| "grad_norm": 0.46585062876561006, |
| "learning_rate": 2.9691689008042895e-05, |
| "loss": 0.366, |
| "step": 1158 |
| }, |
| { |
| "epoch": 1.3980699638118215, |
| "grad_norm": 0.29308421171324583, |
| "learning_rate": 2.966934763181412e-05, |
| "loss": 0.3472, |
| "step": 1159 |
| }, |
| { |
| "epoch": 1.399276236429433, |
| "grad_norm": 0.4212438988181364, |
| "learning_rate": 2.9647006255585347e-05, |
| "loss": 0.3652, |
| "step": 1160 |
| }, |
| { |
| "epoch": 1.4004825090470447, |
| "grad_norm": 0.2845531602103912, |
| "learning_rate": 2.9624664879356567e-05, |
| "loss": 0.3736, |
| "step": 1161 |
| }, |
| { |
| "epoch": 1.4016887816646562, |
| "grad_norm": 0.4362233547700654, |
| "learning_rate": 2.9602323503127795e-05, |
| "loss": 0.3763, |
| "step": 1162 |
| }, |
| { |
| "epoch": 1.4028950542822678, |
| "grad_norm": 0.28669154667850383, |
| "learning_rate": 2.957998212689902e-05, |
| "loss": 0.3634, |
| "step": 1163 |
| }, |
| { |
| "epoch": 1.4041013268998794, |
| "grad_norm": 0.3752571742382678, |
| "learning_rate": 2.955764075067024e-05, |
| "loss": 0.3698, |
| "step": 1164 |
| }, |
| { |
| "epoch": 1.4053075995174908, |
| "grad_norm": 0.30884212641230485, |
| "learning_rate": 2.9535299374441467e-05, |
| "loss": 0.3504, |
| "step": 1165 |
| }, |
| { |
| "epoch": 1.4065138721351025, |
| "grad_norm": 0.3587145325155489, |
| "learning_rate": 2.951295799821269e-05, |
| "loss": 0.3646, |
| "step": 1166 |
| }, |
| { |
| "epoch": 1.4077201447527141, |
| "grad_norm": 0.29533411161129886, |
| "learning_rate": 2.9490616621983912e-05, |
| "loss": 0.3515, |
| "step": 1167 |
| }, |
| { |
| "epoch": 1.4089264173703258, |
| "grad_norm": 0.2615406154832229, |
| "learning_rate": 2.946827524575514e-05, |
| "loss": 0.3574, |
| "step": 1168 |
| }, |
| { |
| "epoch": 1.4101326899879374, |
| "grad_norm": 0.32893567819819586, |
| "learning_rate": 2.9445933869526367e-05, |
| "loss": 0.3603, |
| "step": 1169 |
| }, |
| { |
| "epoch": 1.4113389626055488, |
| "grad_norm": 0.29965434978983685, |
| "learning_rate": 2.9423592493297584e-05, |
| "loss": 0.3596, |
| "step": 1170 |
| }, |
| { |
| "epoch": 1.4125452352231604, |
| "grad_norm": 0.31822499295465917, |
| "learning_rate": 2.9401251117068812e-05, |
| "loss": 0.3675, |
| "step": 1171 |
| }, |
| { |
| "epoch": 1.413751507840772, |
| "grad_norm": 0.2753822644102095, |
| "learning_rate": 2.937890974084004e-05, |
| "loss": 0.3451, |
| "step": 1172 |
| }, |
| { |
| "epoch": 1.4149577804583835, |
| "grad_norm": 0.3298050786742377, |
| "learning_rate": 2.935656836461126e-05, |
| "loss": 0.3706, |
| "step": 1173 |
| }, |
| { |
| "epoch": 1.4161640530759951, |
| "grad_norm": 0.28445112216104007, |
| "learning_rate": 2.9334226988382484e-05, |
| "loss": 0.3507, |
| "step": 1174 |
| }, |
| { |
| "epoch": 1.4173703256936068, |
| "grad_norm": 0.3092808406057826, |
| "learning_rate": 2.931188561215371e-05, |
| "loss": 0.3958, |
| "step": 1175 |
| }, |
| { |
| "epoch": 1.4185765983112184, |
| "grad_norm": 0.2536950340386672, |
| "learning_rate": 2.9289544235924932e-05, |
| "loss": 0.3511, |
| "step": 1176 |
| }, |
| { |
| "epoch": 1.41978287092883, |
| "grad_norm": 0.35919383431613366, |
| "learning_rate": 2.9267202859696156e-05, |
| "loss": 0.3598, |
| "step": 1177 |
| }, |
| { |
| "epoch": 1.4209891435464415, |
| "grad_norm": 0.30220803684797587, |
| "learning_rate": 2.9244861483467384e-05, |
| "loss": 0.3671, |
| "step": 1178 |
| }, |
| { |
| "epoch": 1.422195416164053, |
| "grad_norm": 0.3015596602053583, |
| "learning_rate": 2.9222520107238604e-05, |
| "loss": 0.3627, |
| "step": 1179 |
| }, |
| { |
| "epoch": 1.4234016887816647, |
| "grad_norm": 0.28995228992941746, |
| "learning_rate": 2.9200178731009832e-05, |
| "loss": 0.35, |
| "step": 1180 |
| }, |
| { |
| "epoch": 1.4246079613992761, |
| "grad_norm": 0.29605766490997004, |
| "learning_rate": 2.9177837354781056e-05, |
| "loss": 0.3567, |
| "step": 1181 |
| }, |
| { |
| "epoch": 1.4258142340168878, |
| "grad_norm": 0.2976811007058239, |
| "learning_rate": 2.9155495978552283e-05, |
| "loss": 0.3436, |
| "step": 1182 |
| }, |
| { |
| "epoch": 1.4270205066344994, |
| "grad_norm": 0.2601540220761718, |
| "learning_rate": 2.9133154602323504e-05, |
| "loss": 0.3477, |
| "step": 1183 |
| }, |
| { |
| "epoch": 1.428226779252111, |
| "grad_norm": 0.2834282356161823, |
| "learning_rate": 2.9110813226094728e-05, |
| "loss": 0.3602, |
| "step": 1184 |
| }, |
| { |
| "epoch": 1.4294330518697227, |
| "grad_norm": 0.291754499437398, |
| "learning_rate": 2.9088471849865956e-05, |
| "loss": 0.3633, |
| "step": 1185 |
| }, |
| { |
| "epoch": 1.430639324487334, |
| "grad_norm": 0.2735533586976796, |
| "learning_rate": 2.9066130473637176e-05, |
| "loss": 0.3408, |
| "step": 1186 |
| }, |
| { |
| "epoch": 1.4318455971049457, |
| "grad_norm": 0.28797235804097754, |
| "learning_rate": 2.9043789097408404e-05, |
| "loss": 0.3665, |
| "step": 1187 |
| }, |
| { |
| "epoch": 1.4330518697225574, |
| "grad_norm": 0.27749621251874057, |
| "learning_rate": 2.9021447721179628e-05, |
| "loss": 0.364, |
| "step": 1188 |
| }, |
| { |
| "epoch": 1.4342581423401688, |
| "grad_norm": 0.32339392380392684, |
| "learning_rate": 2.899910634495085e-05, |
| "loss": 0.3514, |
| "step": 1189 |
| }, |
| { |
| "epoch": 1.4354644149577804, |
| "grad_norm": 0.2926720133950538, |
| "learning_rate": 2.8976764968722076e-05, |
| "loss": 0.3842, |
| "step": 1190 |
| }, |
| { |
| "epoch": 1.436670687575392, |
| "grad_norm": 0.28827795732382455, |
| "learning_rate": 2.89544235924933e-05, |
| "loss": 0.3606, |
| "step": 1191 |
| }, |
| { |
| "epoch": 1.4378769601930035, |
| "grad_norm": 0.2915271734549216, |
| "learning_rate": 2.893208221626452e-05, |
| "loss": 0.3536, |
| "step": 1192 |
| }, |
| { |
| "epoch": 1.4390832328106151, |
| "grad_norm": 0.40519254838528146, |
| "learning_rate": 2.890974084003575e-05, |
| "loss": 0.3751, |
| "step": 1193 |
| }, |
| { |
| "epoch": 1.4402895054282268, |
| "grad_norm": 0.29164522307451995, |
| "learning_rate": 2.8887399463806976e-05, |
| "loss": 0.3705, |
| "step": 1194 |
| }, |
| { |
| "epoch": 1.4414957780458384, |
| "grad_norm": 0.311020331010995, |
| "learning_rate": 2.8865058087578197e-05, |
| "loss": 0.3538, |
| "step": 1195 |
| }, |
| { |
| "epoch": 1.44270205066345, |
| "grad_norm": 0.35439846786285, |
| "learning_rate": 2.884271671134942e-05, |
| "loss": 0.3823, |
| "step": 1196 |
| }, |
| { |
| "epoch": 1.4439083232810614, |
| "grad_norm": 0.31436335956903844, |
| "learning_rate": 2.8820375335120648e-05, |
| "loss": 0.3714, |
| "step": 1197 |
| }, |
| { |
| "epoch": 1.445114595898673, |
| "grad_norm": 0.35023249437780235, |
| "learning_rate": 2.879803395889187e-05, |
| "loss": 0.3915, |
| "step": 1198 |
| }, |
| { |
| "epoch": 1.4463208685162847, |
| "grad_norm": 0.28045651639475067, |
| "learning_rate": 2.8775692582663093e-05, |
| "loss": 0.346, |
| "step": 1199 |
| }, |
| { |
| "epoch": 1.4475271411338961, |
| "grad_norm": 0.31926475304857066, |
| "learning_rate": 2.875335120643432e-05, |
| "loss": 0.3444, |
| "step": 1200 |
| }, |
| { |
| "epoch": 1.4487334137515078, |
| "grad_norm": 0.28708496856007876, |
| "learning_rate": 2.873100983020554e-05, |
| "loss": 0.3498, |
| "step": 1201 |
| }, |
| { |
| "epoch": 1.4499396863691194, |
| "grad_norm": 0.43262881864227015, |
| "learning_rate": 2.870866845397677e-05, |
| "loss": 0.3505, |
| "step": 1202 |
| }, |
| { |
| "epoch": 1.451145958986731, |
| "grad_norm": 0.2687357810356031, |
| "learning_rate": 2.8686327077747993e-05, |
| "loss": 0.3519, |
| "step": 1203 |
| }, |
| { |
| "epoch": 1.4523522316043427, |
| "grad_norm": 0.3607507222010132, |
| "learning_rate": 2.8663985701519213e-05, |
| "loss": 0.3723, |
| "step": 1204 |
| }, |
| { |
| "epoch": 1.453558504221954, |
| "grad_norm": 0.30338701749850056, |
| "learning_rate": 2.864164432529044e-05, |
| "loss": 0.3587, |
| "step": 1205 |
| }, |
| { |
| "epoch": 1.4547647768395657, |
| "grad_norm": 0.31603329875122715, |
| "learning_rate": 2.8619302949061665e-05, |
| "loss": 0.3757, |
| "step": 1206 |
| }, |
| { |
| "epoch": 1.4559710494571774, |
| "grad_norm": 0.33022323730816805, |
| "learning_rate": 2.8596961572832886e-05, |
| "loss": 0.3699, |
| "step": 1207 |
| }, |
| { |
| "epoch": 1.4571773220747888, |
| "grad_norm": 0.306872731375517, |
| "learning_rate": 2.8574620196604113e-05, |
| "loss": 0.3915, |
| "step": 1208 |
| }, |
| { |
| "epoch": 1.4583835946924004, |
| "grad_norm": 0.34818044786028685, |
| "learning_rate": 2.8552278820375337e-05, |
| "loss": 0.3686, |
| "step": 1209 |
| }, |
| { |
| "epoch": 1.459589867310012, |
| "grad_norm": 0.3180265730282909, |
| "learning_rate": 2.8529937444146558e-05, |
| "loss": 0.3433, |
| "step": 1210 |
| }, |
| { |
| "epoch": 1.4607961399276237, |
| "grad_norm": 0.28857128660801623, |
| "learning_rate": 2.8507596067917785e-05, |
| "loss": 0.3366, |
| "step": 1211 |
| }, |
| { |
| "epoch": 1.4620024125452353, |
| "grad_norm": 0.3482553573855266, |
| "learning_rate": 2.8485254691689013e-05, |
| "loss": 0.3604, |
| "step": 1212 |
| }, |
| { |
| "epoch": 1.4632086851628467, |
| "grad_norm": 0.30676083475386495, |
| "learning_rate": 2.8462913315460234e-05, |
| "loss": 0.3815, |
| "step": 1213 |
| }, |
| { |
| "epoch": 1.4644149577804584, |
| "grad_norm": 0.30367765388377194, |
| "learning_rate": 2.8440571939231458e-05, |
| "loss": 0.3726, |
| "step": 1214 |
| }, |
| { |
| "epoch": 1.46562123039807, |
| "grad_norm": 0.3446510768407655, |
| "learning_rate": 2.8418230563002685e-05, |
| "loss": 0.3677, |
| "step": 1215 |
| }, |
| { |
| "epoch": 1.4668275030156814, |
| "grad_norm": 0.3172797310351058, |
| "learning_rate": 2.8395889186773906e-05, |
| "loss": 0.363, |
| "step": 1216 |
| }, |
| { |
| "epoch": 1.468033775633293, |
| "grad_norm": 0.2953075162278567, |
| "learning_rate": 2.837354781054513e-05, |
| "loss": 0.3756, |
| "step": 1217 |
| }, |
| { |
| "epoch": 1.4692400482509047, |
| "grad_norm": 0.32783840098529415, |
| "learning_rate": 2.8351206434316357e-05, |
| "loss": 0.3642, |
| "step": 1218 |
| }, |
| { |
| "epoch": 1.4704463208685163, |
| "grad_norm": 0.280445028660401, |
| "learning_rate": 2.8328865058087578e-05, |
| "loss": 0.3513, |
| "step": 1219 |
| }, |
| { |
| "epoch": 1.471652593486128, |
| "grad_norm": 0.33558468097054406, |
| "learning_rate": 2.8306523681858806e-05, |
| "loss": 0.3651, |
| "step": 1220 |
| }, |
| { |
| "epoch": 1.4728588661037394, |
| "grad_norm": 0.336070959339012, |
| "learning_rate": 2.828418230563003e-05, |
| "loss": 0.3591, |
| "step": 1221 |
| }, |
| { |
| "epoch": 1.474065138721351, |
| "grad_norm": 0.2900358012573861, |
| "learning_rate": 2.826184092940125e-05, |
| "loss": 0.3744, |
| "step": 1222 |
| }, |
| { |
| "epoch": 1.4752714113389627, |
| "grad_norm": 0.2972532526663739, |
| "learning_rate": 2.8239499553172478e-05, |
| "loss": 0.3502, |
| "step": 1223 |
| }, |
| { |
| "epoch": 1.476477683956574, |
| "grad_norm": 0.3078587095137843, |
| "learning_rate": 2.8217158176943702e-05, |
| "loss": 0.3647, |
| "step": 1224 |
| }, |
| { |
| "epoch": 1.4776839565741857, |
| "grad_norm": 0.32293847798260233, |
| "learning_rate": 2.8194816800714923e-05, |
| "loss": 0.3621, |
| "step": 1225 |
| }, |
| { |
| "epoch": 1.4788902291917974, |
| "grad_norm": 0.2698381433035557, |
| "learning_rate": 2.817247542448615e-05, |
| "loss": 0.3625, |
| "step": 1226 |
| }, |
| { |
| "epoch": 1.480096501809409, |
| "grad_norm": 0.3092649084688229, |
| "learning_rate": 2.8150134048257378e-05, |
| "loss": 0.3588, |
| "step": 1227 |
| }, |
| { |
| "epoch": 1.4813027744270206, |
| "grad_norm": 0.32836804858453694, |
| "learning_rate": 2.81277926720286e-05, |
| "loss": 0.3726, |
| "step": 1228 |
| }, |
| { |
| "epoch": 1.482509047044632, |
| "grad_norm": 0.4687070308155192, |
| "learning_rate": 2.8105451295799822e-05, |
| "loss": 0.3517, |
| "step": 1229 |
| }, |
| { |
| "epoch": 1.4837153196622437, |
| "grad_norm": 0.33776920307320657, |
| "learning_rate": 2.808310991957105e-05, |
| "loss": 0.363, |
| "step": 1230 |
| }, |
| { |
| "epoch": 1.4849215922798553, |
| "grad_norm": 0.34831064110686916, |
| "learning_rate": 2.806076854334227e-05, |
| "loss": 0.3582, |
| "step": 1231 |
| }, |
| { |
| "epoch": 1.4861278648974667, |
| "grad_norm": 0.3530691789102461, |
| "learning_rate": 2.8038427167113495e-05, |
| "loss": 0.3749, |
| "step": 1232 |
| }, |
| { |
| "epoch": 1.4873341375150784, |
| "grad_norm": 0.31161336626579134, |
| "learning_rate": 2.8016085790884722e-05, |
| "loss": 0.3447, |
| "step": 1233 |
| }, |
| { |
| "epoch": 1.48854041013269, |
| "grad_norm": 0.2861464976049845, |
| "learning_rate": 2.7993744414655943e-05, |
| "loss": 0.3377, |
| "step": 1234 |
| }, |
| { |
| "epoch": 1.4897466827503016, |
| "grad_norm": 0.3600397490414202, |
| "learning_rate": 2.7971403038427167e-05, |
| "loss": 0.3677, |
| "step": 1235 |
| }, |
| { |
| "epoch": 1.4909529553679133, |
| "grad_norm": 0.28131622511858695, |
| "learning_rate": 2.7949061662198394e-05, |
| "loss": 0.3789, |
| "step": 1236 |
| }, |
| { |
| "epoch": 1.4921592279855247, |
| "grad_norm": 0.3516965064598944, |
| "learning_rate": 2.7926720285969615e-05, |
| "loss": 0.3852, |
| "step": 1237 |
| }, |
| { |
| "epoch": 1.4933655006031363, |
| "grad_norm": 0.302444809100267, |
| "learning_rate": 2.7904378909740843e-05, |
| "loss": 0.3666, |
| "step": 1238 |
| }, |
| { |
| "epoch": 1.494571773220748, |
| "grad_norm": 0.3409455236720351, |
| "learning_rate": 2.7882037533512067e-05, |
| "loss": 0.3713, |
| "step": 1239 |
| }, |
| { |
| "epoch": 1.4957780458383594, |
| "grad_norm": 0.27999659004938476, |
| "learning_rate": 2.7859696157283287e-05, |
| "loss": 0.3563, |
| "step": 1240 |
| }, |
| { |
| "epoch": 1.496984318455971, |
| "grad_norm": 0.2948450451811389, |
| "learning_rate": 2.7837354781054515e-05, |
| "loss": 0.3618, |
| "step": 1241 |
| }, |
| { |
| "epoch": 1.4981905910735827, |
| "grad_norm": 0.2986130381148085, |
| "learning_rate": 2.781501340482574e-05, |
| "loss": 0.3635, |
| "step": 1242 |
| }, |
| { |
| "epoch": 1.4993968636911943, |
| "grad_norm": 0.29615277744615615, |
| "learning_rate": 2.779267202859696e-05, |
| "loss": 0.3539, |
| "step": 1243 |
| }, |
| { |
| "epoch": 1.500603136308806, |
| "grad_norm": 0.3364174580644653, |
| "learning_rate": 2.7770330652368187e-05, |
| "loss": 0.3498, |
| "step": 1244 |
| }, |
| { |
| "epoch": 1.5018094089264173, |
| "grad_norm": 0.2763203714148372, |
| "learning_rate": 2.7747989276139415e-05, |
| "loss": 0.3558, |
| "step": 1245 |
| }, |
| { |
| "epoch": 1.503015681544029, |
| "grad_norm": 0.34787271866709774, |
| "learning_rate": 2.7725647899910635e-05, |
| "loss": 0.3673, |
| "step": 1246 |
| }, |
| { |
| "epoch": 1.5042219541616406, |
| "grad_norm": 0.30434396850748885, |
| "learning_rate": 2.770330652368186e-05, |
| "loss": 0.3638, |
| "step": 1247 |
| }, |
| { |
| "epoch": 1.505428226779252, |
| "grad_norm": 0.33166499767283425, |
| "learning_rate": 2.7680965147453087e-05, |
| "loss": 0.3515, |
| "step": 1248 |
| }, |
| { |
| "epoch": 1.5066344993968637, |
| "grad_norm": 0.29134837059756147, |
| "learning_rate": 2.7658623771224308e-05, |
| "loss": 0.3691, |
| "step": 1249 |
| }, |
| { |
| "epoch": 1.5078407720144753, |
| "grad_norm": 0.2628685279525233, |
| "learning_rate": 2.763628239499553e-05, |
| "loss": 0.362, |
| "step": 1250 |
| }, |
| { |
| "epoch": 1.5090470446320867, |
| "grad_norm": 0.32505828654754587, |
| "learning_rate": 2.761394101876676e-05, |
| "loss": 0.3617, |
| "step": 1251 |
| }, |
| { |
| "epoch": 1.5102533172496986, |
| "grad_norm": 0.29118585516826345, |
| "learning_rate": 2.759159964253798e-05, |
| "loss": 0.3559, |
| "step": 1252 |
| }, |
| { |
| "epoch": 1.51145958986731, |
| "grad_norm": 0.3142120436832583, |
| "learning_rate": 2.7569258266309207e-05, |
| "loss": 0.3615, |
| "step": 1253 |
| }, |
| { |
| "epoch": 1.5126658624849216, |
| "grad_norm": 0.2795160928845954, |
| "learning_rate": 2.754691689008043e-05, |
| "loss": 0.3386, |
| "step": 1254 |
| }, |
| { |
| "epoch": 1.5138721351025333, |
| "grad_norm": 0.35336962090838353, |
| "learning_rate": 2.7524575513851652e-05, |
| "loss": 0.364, |
| "step": 1255 |
| }, |
| { |
| "epoch": 1.5150784077201447, |
| "grad_norm": 0.27339107672471447, |
| "learning_rate": 2.750223413762288e-05, |
| "loss": 0.3586, |
| "step": 1256 |
| }, |
| { |
| "epoch": 1.5162846803377563, |
| "grad_norm": 0.349577261694894, |
| "learning_rate": 2.7479892761394104e-05, |
| "loss": 0.3643, |
| "step": 1257 |
| }, |
| { |
| "epoch": 1.517490952955368, |
| "grad_norm": 0.33322429078016025, |
| "learning_rate": 2.7457551385165324e-05, |
| "loss": 0.3607, |
| "step": 1258 |
| }, |
| { |
| "epoch": 1.5186972255729794, |
| "grad_norm": 0.3461037586939447, |
| "learning_rate": 2.7435210008936552e-05, |
| "loss": 0.3623, |
| "step": 1259 |
| }, |
| { |
| "epoch": 1.5199034981905912, |
| "grad_norm": 0.28408740662062104, |
| "learning_rate": 2.741286863270778e-05, |
| "loss": 0.3673, |
| "step": 1260 |
| }, |
| { |
| "epoch": 1.5211097708082026, |
| "grad_norm": 0.29583059946956347, |
| "learning_rate": 2.7390527256478997e-05, |
| "loss": 0.3416, |
| "step": 1261 |
| }, |
| { |
| "epoch": 1.5223160434258143, |
| "grad_norm": 0.3440235067952912, |
| "learning_rate": 2.7368185880250224e-05, |
| "loss": 0.3679, |
| "step": 1262 |
| }, |
| { |
| "epoch": 1.523522316043426, |
| "grad_norm": 0.3120646894770821, |
| "learning_rate": 2.734584450402145e-05, |
| "loss": 0.3489, |
| "step": 1263 |
| }, |
| { |
| "epoch": 1.5247285886610373, |
| "grad_norm": 0.31673685428487164, |
| "learning_rate": 2.7323503127792672e-05, |
| "loss": 0.3291, |
| "step": 1264 |
| }, |
| { |
| "epoch": 1.525934861278649, |
| "grad_norm": 0.3147048237479034, |
| "learning_rate": 2.7301161751563896e-05, |
| "loss": 0.3509, |
| "step": 1265 |
| }, |
| { |
| "epoch": 1.5271411338962606, |
| "grad_norm": 0.3914175398497735, |
| "learning_rate": 2.7278820375335124e-05, |
| "loss": 0.3815, |
| "step": 1266 |
| }, |
| { |
| "epoch": 1.528347406513872, |
| "grad_norm": 0.4317414622229666, |
| "learning_rate": 2.7256478999106345e-05, |
| "loss": 0.3656, |
| "step": 1267 |
| }, |
| { |
| "epoch": 1.5295536791314839, |
| "grad_norm": 0.3462625512487634, |
| "learning_rate": 2.723413762287757e-05, |
| "loss": 0.3548, |
| "step": 1268 |
| }, |
| { |
| "epoch": 1.5307599517490953, |
| "grad_norm": 0.3130762407373115, |
| "learning_rate": 2.7211796246648796e-05, |
| "loss": 0.3481, |
| "step": 1269 |
| }, |
| { |
| "epoch": 1.531966224366707, |
| "grad_norm": 0.3096037049836567, |
| "learning_rate": 2.7189454870420017e-05, |
| "loss": 0.3663, |
| "step": 1270 |
| }, |
| { |
| "epoch": 1.5331724969843186, |
| "grad_norm": 0.3567741337319747, |
| "learning_rate": 2.7167113494191244e-05, |
| "loss": 0.3654, |
| "step": 1271 |
| }, |
| { |
| "epoch": 1.53437876960193, |
| "grad_norm": 0.2695934120971819, |
| "learning_rate": 2.714477211796247e-05, |
| "loss": 0.3724, |
| "step": 1272 |
| }, |
| { |
| "epoch": 1.5355850422195416, |
| "grad_norm": 0.35011201758948296, |
| "learning_rate": 2.712243074173369e-05, |
| "loss": 0.3962, |
| "step": 1273 |
| }, |
| { |
| "epoch": 1.5367913148371533, |
| "grad_norm": 0.31227812163963997, |
| "learning_rate": 2.7100089365504917e-05, |
| "loss": 0.3743, |
| "step": 1274 |
| }, |
| { |
| "epoch": 1.5379975874547647, |
| "grad_norm": 0.24710433568638754, |
| "learning_rate": 2.707774798927614e-05, |
| "loss": 0.3548, |
| "step": 1275 |
| }, |
| { |
| "epoch": 1.5392038600723763, |
| "grad_norm": 0.32584533326819515, |
| "learning_rate": 2.705540661304736e-05, |
| "loss": 0.369, |
| "step": 1276 |
| }, |
| { |
| "epoch": 1.540410132689988, |
| "grad_norm": 0.3136506898477773, |
| "learning_rate": 2.703306523681859e-05, |
| "loss": 0.3483, |
| "step": 1277 |
| }, |
| { |
| "epoch": 1.5416164053075994, |
| "grad_norm": 0.2835482067624798, |
| "learning_rate": 2.7010723860589816e-05, |
| "loss": 0.3794, |
| "step": 1278 |
| }, |
| { |
| "epoch": 1.5428226779252112, |
| "grad_norm": 0.29765053016237486, |
| "learning_rate": 2.6988382484361037e-05, |
| "loss": 0.3619, |
| "step": 1279 |
| }, |
| { |
| "epoch": 1.5440289505428226, |
| "grad_norm": 0.2854721347083471, |
| "learning_rate": 2.696604110813226e-05, |
| "loss": 0.3574, |
| "step": 1280 |
| }, |
| { |
| "epoch": 1.5452352231604343, |
| "grad_norm": 0.27855645978313726, |
| "learning_rate": 2.694369973190349e-05, |
| "loss": 0.3576, |
| "step": 1281 |
| }, |
| { |
| "epoch": 1.546441495778046, |
| "grad_norm": 0.31553162770965193, |
| "learning_rate": 2.692135835567471e-05, |
| "loss": 0.3545, |
| "step": 1282 |
| }, |
| { |
| "epoch": 1.5476477683956573, |
| "grad_norm": 0.2860017791263232, |
| "learning_rate": 2.6899016979445933e-05, |
| "loss": 0.3788, |
| "step": 1283 |
| }, |
| { |
| "epoch": 1.548854041013269, |
| "grad_norm": 0.310042700334375, |
| "learning_rate": 2.687667560321716e-05, |
| "loss": 0.3392, |
| "step": 1284 |
| }, |
| { |
| "epoch": 1.5500603136308806, |
| "grad_norm": 0.3101101311732966, |
| "learning_rate": 2.685433422698838e-05, |
| "loss": 0.3831, |
| "step": 1285 |
| }, |
| { |
| "epoch": 1.551266586248492, |
| "grad_norm": 0.24397788929167546, |
| "learning_rate": 2.683199285075961e-05, |
| "loss": 0.3562, |
| "step": 1286 |
| }, |
| { |
| "epoch": 1.5524728588661039, |
| "grad_norm": 0.28475924698050303, |
| "learning_rate": 2.6809651474530833e-05, |
| "loss": 0.3524, |
| "step": 1287 |
| }, |
| { |
| "epoch": 1.5536791314837153, |
| "grad_norm": 0.2912846013587237, |
| "learning_rate": 2.6787310098302054e-05, |
| "loss": 0.3515, |
| "step": 1288 |
| }, |
| { |
| "epoch": 1.554885404101327, |
| "grad_norm": 0.31074089031425983, |
| "learning_rate": 2.676496872207328e-05, |
| "loss": 0.3566, |
| "step": 1289 |
| }, |
| { |
| "epoch": 1.5560916767189386, |
| "grad_norm": 0.28826636625919533, |
| "learning_rate": 2.6742627345844505e-05, |
| "loss": 0.3605, |
| "step": 1290 |
| }, |
| { |
| "epoch": 1.55729794933655, |
| "grad_norm": 0.33111363951512146, |
| "learning_rate": 2.6720285969615726e-05, |
| "loss": 0.3751, |
| "step": 1291 |
| }, |
| { |
| "epoch": 1.5585042219541616, |
| "grad_norm": 0.2677678491527853, |
| "learning_rate": 2.6697944593386954e-05, |
| "loss": 0.3632, |
| "step": 1292 |
| }, |
| { |
| "epoch": 1.5597104945717732, |
| "grad_norm": 0.31135416808124383, |
| "learning_rate": 2.667560321715818e-05, |
| "loss": 0.3744, |
| "step": 1293 |
| }, |
| { |
| "epoch": 1.5609167671893847, |
| "grad_norm": 0.30091299310513797, |
| "learning_rate": 2.66532618409294e-05, |
| "loss": 0.3855, |
| "step": 1294 |
| }, |
| { |
| "epoch": 1.5621230398069965, |
| "grad_norm": 0.23261273418362208, |
| "learning_rate": 2.6630920464700626e-05, |
| "loss": 0.3438, |
| "step": 1295 |
| }, |
| { |
| "epoch": 1.563329312424608, |
| "grad_norm": 0.32048535435397274, |
| "learning_rate": 2.6608579088471853e-05, |
| "loss": 0.3781, |
| "step": 1296 |
| }, |
| { |
| "epoch": 1.5645355850422196, |
| "grad_norm": 0.2628320985489263, |
| "learning_rate": 2.6586237712243074e-05, |
| "loss": 0.3387, |
| "step": 1297 |
| }, |
| { |
| "epoch": 1.5657418576598312, |
| "grad_norm": 0.27460750070719586, |
| "learning_rate": 2.6563896336014298e-05, |
| "loss": 0.3569, |
| "step": 1298 |
| }, |
| { |
| "epoch": 1.5669481302774426, |
| "grad_norm": 0.27441942018435256, |
| "learning_rate": 2.6541554959785526e-05, |
| "loss": 0.3647, |
| "step": 1299 |
| }, |
| { |
| "epoch": 1.5681544028950543, |
| "grad_norm": 0.2876053463336936, |
| "learning_rate": 2.6519213583556746e-05, |
| "loss": 0.3622, |
| "step": 1300 |
| }, |
| { |
| "epoch": 1.569360675512666, |
| "grad_norm": 0.2754619238920535, |
| "learning_rate": 2.649687220732797e-05, |
| "loss": 0.345, |
| "step": 1301 |
| }, |
| { |
| "epoch": 1.5705669481302773, |
| "grad_norm": 0.31328417122026686, |
| "learning_rate": 2.6474530831099198e-05, |
| "loss": 0.3875, |
| "step": 1302 |
| }, |
| { |
| "epoch": 1.5717732207478892, |
| "grad_norm": 0.2610769698506394, |
| "learning_rate": 2.645218945487042e-05, |
| "loss": 0.3473, |
| "step": 1303 |
| }, |
| { |
| "epoch": 1.5729794933655006, |
| "grad_norm": 0.31481781480005866, |
| "learning_rate": 2.6429848078641646e-05, |
| "loss": 0.3639, |
| "step": 1304 |
| }, |
| { |
| "epoch": 1.5741857659831122, |
| "grad_norm": 0.3016217566559489, |
| "learning_rate": 2.640750670241287e-05, |
| "loss": 0.3769, |
| "step": 1305 |
| }, |
| { |
| "epoch": 1.5753920386007239, |
| "grad_norm": 0.3065182968634683, |
| "learning_rate": 2.6385165326184098e-05, |
| "loss": 0.3576, |
| "step": 1306 |
| }, |
| { |
| "epoch": 1.5765983112183353, |
| "grad_norm": 0.3094495579248347, |
| "learning_rate": 2.6362823949955318e-05, |
| "loss": 0.3488, |
| "step": 1307 |
| }, |
| { |
| "epoch": 1.577804583835947, |
| "grad_norm": 0.37263099382228515, |
| "learning_rate": 2.6340482573726542e-05, |
| "loss": 0.365, |
| "step": 1308 |
| }, |
| { |
| "epoch": 1.5790108564535585, |
| "grad_norm": 0.2532097044448258, |
| "learning_rate": 2.631814119749777e-05, |
| "loss": 0.3459, |
| "step": 1309 |
| }, |
| { |
| "epoch": 1.58021712907117, |
| "grad_norm": 0.30034042957794177, |
| "learning_rate": 2.629579982126899e-05, |
| "loss": 0.3479, |
| "step": 1310 |
| }, |
| { |
| "epoch": 1.5814234016887818, |
| "grad_norm": 0.4315569077455076, |
| "learning_rate": 2.6273458445040218e-05, |
| "loss": 0.3785, |
| "step": 1311 |
| }, |
| { |
| "epoch": 1.5826296743063932, |
| "grad_norm": 0.2611868817203264, |
| "learning_rate": 2.6251117068811442e-05, |
| "loss": 0.3688, |
| "step": 1312 |
| }, |
| { |
| "epoch": 1.5838359469240049, |
| "grad_norm": 0.3436018569495356, |
| "learning_rate": 2.6228775692582663e-05, |
| "loss": 0.3661, |
| "step": 1313 |
| }, |
| { |
| "epoch": 1.5850422195416165, |
| "grad_norm": 0.3775983171861393, |
| "learning_rate": 2.620643431635389e-05, |
| "loss": 0.35, |
| "step": 1314 |
| }, |
| { |
| "epoch": 1.586248492159228, |
| "grad_norm": 0.260001030240192, |
| "learning_rate": 2.6184092940125114e-05, |
| "loss": 0.3607, |
| "step": 1315 |
| }, |
| { |
| "epoch": 1.5874547647768396, |
| "grad_norm": 0.3192806233423819, |
| "learning_rate": 2.6161751563896335e-05, |
| "loss": 0.3605, |
| "step": 1316 |
| }, |
| { |
| "epoch": 1.5886610373944512, |
| "grad_norm": 0.34906939857027053, |
| "learning_rate": 2.6139410187667563e-05, |
| "loss": 0.3392, |
| "step": 1317 |
| }, |
| { |
| "epoch": 1.5898673100120626, |
| "grad_norm": 0.3041401770631904, |
| "learning_rate": 2.611706881143879e-05, |
| "loss": 0.3579, |
| "step": 1318 |
| }, |
| { |
| "epoch": 1.5910735826296745, |
| "grad_norm": 0.30910377982596804, |
| "learning_rate": 2.609472743521001e-05, |
| "loss": 0.3506, |
| "step": 1319 |
| }, |
| { |
| "epoch": 1.5922798552472859, |
| "grad_norm": 0.3812561649468747, |
| "learning_rate": 2.6072386058981235e-05, |
| "loss": 0.3753, |
| "step": 1320 |
| }, |
| { |
| "epoch": 1.5934861278648975, |
| "grad_norm": 0.2945017335023286, |
| "learning_rate": 2.6050044682752462e-05, |
| "loss": 0.3779, |
| "step": 1321 |
| }, |
| { |
| "epoch": 1.5946924004825092, |
| "grad_norm": 0.3169444082111978, |
| "learning_rate": 2.6027703306523683e-05, |
| "loss": 0.3455, |
| "step": 1322 |
| }, |
| { |
| "epoch": 1.5958986731001206, |
| "grad_norm": 0.30538941387132723, |
| "learning_rate": 2.6005361930294907e-05, |
| "loss": 0.3619, |
| "step": 1323 |
| }, |
| { |
| "epoch": 1.5971049457177322, |
| "grad_norm": 0.2750984977884928, |
| "learning_rate": 2.5983020554066135e-05, |
| "loss": 0.3808, |
| "step": 1324 |
| }, |
| { |
| "epoch": 1.5983112183353438, |
| "grad_norm": 0.30285454154498515, |
| "learning_rate": 2.5960679177837355e-05, |
| "loss": 0.3519, |
| "step": 1325 |
| }, |
| { |
| "epoch": 1.5995174909529553, |
| "grad_norm": 0.33944179519883355, |
| "learning_rate": 2.593833780160858e-05, |
| "loss": 0.3616, |
| "step": 1326 |
| }, |
| { |
| "epoch": 1.600723763570567, |
| "grad_norm": 0.26318466057317974, |
| "learning_rate": 2.5915996425379807e-05, |
| "loss": 0.3706, |
| "step": 1327 |
| }, |
| { |
| "epoch": 1.6019300361881785, |
| "grad_norm": 0.29783411165362406, |
| "learning_rate": 2.5893655049151027e-05, |
| "loss": 0.3572, |
| "step": 1328 |
| }, |
| { |
| "epoch": 1.60313630880579, |
| "grad_norm": 0.35002375269009606, |
| "learning_rate": 2.5871313672922255e-05, |
| "loss": 0.3579, |
| "step": 1329 |
| }, |
| { |
| "epoch": 1.6043425814234018, |
| "grad_norm": 0.29418265664397103, |
| "learning_rate": 2.584897229669348e-05, |
| "loss": 0.372, |
| "step": 1330 |
| }, |
| { |
| "epoch": 1.6055488540410132, |
| "grad_norm": 0.3203570332410853, |
| "learning_rate": 2.58266309204647e-05, |
| "loss": 0.354, |
| "step": 1331 |
| }, |
| { |
| "epoch": 1.6067551266586249, |
| "grad_norm": 0.3258614955229903, |
| "learning_rate": 2.5804289544235927e-05, |
| "loss": 0.3599, |
| "step": 1332 |
| }, |
| { |
| "epoch": 1.6079613992762365, |
| "grad_norm": 0.27796800216478634, |
| "learning_rate": 2.578194816800715e-05, |
| "loss": 0.3685, |
| "step": 1333 |
| }, |
| { |
| "epoch": 1.609167671893848, |
| "grad_norm": 0.3939022371539636, |
| "learning_rate": 2.5759606791778372e-05, |
| "loss": 0.3825, |
| "step": 1334 |
| }, |
| { |
| "epoch": 1.6103739445114595, |
| "grad_norm": 0.2834697874561306, |
| "learning_rate": 2.57372654155496e-05, |
| "loss": 0.3795, |
| "step": 1335 |
| }, |
| { |
| "epoch": 1.6115802171290712, |
| "grad_norm": 0.29584363725941537, |
| "learning_rate": 2.5714924039320827e-05, |
| "loss": 0.3788, |
| "step": 1336 |
| }, |
| { |
| "epoch": 1.6127864897466826, |
| "grad_norm": 0.2986606089197964, |
| "learning_rate": 2.5692582663092048e-05, |
| "loss": 0.3662, |
| "step": 1337 |
| }, |
| { |
| "epoch": 1.6139927623642945, |
| "grad_norm": 0.33526376758814486, |
| "learning_rate": 2.5670241286863272e-05, |
| "loss": 0.3507, |
| "step": 1338 |
| }, |
| { |
| "epoch": 1.6151990349819059, |
| "grad_norm": 0.2741610353438533, |
| "learning_rate": 2.56478999106345e-05, |
| "loss": 0.3761, |
| "step": 1339 |
| }, |
| { |
| "epoch": 1.6164053075995175, |
| "grad_norm": 0.28973832017005374, |
| "learning_rate": 2.562555853440572e-05, |
| "loss": 0.3756, |
| "step": 1340 |
| }, |
| { |
| "epoch": 1.6176115802171291, |
| "grad_norm": 0.28594808720953097, |
| "learning_rate": 2.5603217158176944e-05, |
| "loss": 0.3501, |
| "step": 1341 |
| }, |
| { |
| "epoch": 1.6188178528347406, |
| "grad_norm": 0.27271839503423706, |
| "learning_rate": 2.558087578194817e-05, |
| "loss": 0.3552, |
| "step": 1342 |
| }, |
| { |
| "epoch": 1.6200241254523522, |
| "grad_norm": 0.2709700411591409, |
| "learning_rate": 2.5558534405719392e-05, |
| "loss": 0.3415, |
| "step": 1343 |
| }, |
| { |
| "epoch": 1.6212303980699638, |
| "grad_norm": 0.2934700703302023, |
| "learning_rate": 2.553619302949062e-05, |
| "loss": 0.3575, |
| "step": 1344 |
| }, |
| { |
| "epoch": 1.6224366706875752, |
| "grad_norm": 0.3093331948166226, |
| "learning_rate": 2.5513851653261844e-05, |
| "loss": 0.3487, |
| "step": 1345 |
| }, |
| { |
| "epoch": 1.623642943305187, |
| "grad_norm": 0.2587526200120511, |
| "learning_rate": 2.5491510277033064e-05, |
| "loss": 0.3523, |
| "step": 1346 |
| }, |
| { |
| "epoch": 1.6248492159227985, |
| "grad_norm": 0.30187549679671233, |
| "learning_rate": 2.5469168900804292e-05, |
| "loss": 0.3621, |
| "step": 1347 |
| }, |
| { |
| "epoch": 1.6260554885404102, |
| "grad_norm": 0.35929811837951886, |
| "learning_rate": 2.5446827524575516e-05, |
| "loss": 0.3724, |
| "step": 1348 |
| }, |
| { |
| "epoch": 1.6272617611580218, |
| "grad_norm": 0.2852663310752726, |
| "learning_rate": 2.5424486148346737e-05, |
| "loss": 0.3538, |
| "step": 1349 |
| }, |
| { |
| "epoch": 1.6284680337756332, |
| "grad_norm": 0.33317652752316707, |
| "learning_rate": 2.5402144772117964e-05, |
| "loss": 0.4013, |
| "step": 1350 |
| }, |
| { |
| "epoch": 1.6296743063932448, |
| "grad_norm": 0.33926976945090287, |
| "learning_rate": 2.5379803395889192e-05, |
| "loss": 0.3608, |
| "step": 1351 |
| }, |
| { |
| "epoch": 1.6308805790108565, |
| "grad_norm": 0.29336918116275684, |
| "learning_rate": 2.535746201966041e-05, |
| "loss": 0.3624, |
| "step": 1352 |
| }, |
| { |
| "epoch": 1.632086851628468, |
| "grad_norm": 0.32537359239341435, |
| "learning_rate": 2.5335120643431636e-05, |
| "loss": 0.3667, |
| "step": 1353 |
| }, |
| { |
| "epoch": 1.6332931242460798, |
| "grad_norm": 0.31694181375333264, |
| "learning_rate": 2.5312779267202864e-05, |
| "loss": 0.3572, |
| "step": 1354 |
| }, |
| { |
| "epoch": 1.6344993968636912, |
| "grad_norm": 0.31129200279927227, |
| "learning_rate": 2.5290437890974085e-05, |
| "loss": 0.3566, |
| "step": 1355 |
| }, |
| { |
| "epoch": 1.6357056694813028, |
| "grad_norm": 0.3026625971829585, |
| "learning_rate": 2.526809651474531e-05, |
| "loss": 0.3511, |
| "step": 1356 |
| }, |
| { |
| "epoch": 1.6369119420989144, |
| "grad_norm": 0.2778204933785298, |
| "learning_rate": 2.5245755138516536e-05, |
| "loss": 0.3515, |
| "step": 1357 |
| }, |
| { |
| "epoch": 1.6381182147165259, |
| "grad_norm": 0.28112074622730776, |
| "learning_rate": 2.5223413762287757e-05, |
| "loss": 0.3467, |
| "step": 1358 |
| }, |
| { |
| "epoch": 1.6393244873341375, |
| "grad_norm": 0.2760083290003972, |
| "learning_rate": 2.520107238605898e-05, |
| "loss": 0.3431, |
| "step": 1359 |
| }, |
| { |
| "epoch": 1.6405307599517491, |
| "grad_norm": 0.2840407711954704, |
| "learning_rate": 2.517873100983021e-05, |
| "loss": 0.337, |
| "step": 1360 |
| }, |
| { |
| "epoch": 1.6417370325693605, |
| "grad_norm": 0.2665258347081383, |
| "learning_rate": 2.515638963360143e-05, |
| "loss": 0.3506, |
| "step": 1361 |
| }, |
| { |
| "epoch": 1.6429433051869724, |
| "grad_norm": 0.32503179208618316, |
| "learning_rate": 2.5134048257372657e-05, |
| "loss": 0.3471, |
| "step": 1362 |
| }, |
| { |
| "epoch": 1.6441495778045838, |
| "grad_norm": 0.30286222825612846, |
| "learning_rate": 2.511170688114388e-05, |
| "loss": 0.363, |
| "step": 1363 |
| }, |
| { |
| "epoch": 1.6453558504221955, |
| "grad_norm": 0.29668163519295065, |
| "learning_rate": 2.50893655049151e-05, |
| "loss": 0.3752, |
| "step": 1364 |
| }, |
| { |
| "epoch": 1.646562123039807, |
| "grad_norm": 0.34957635877197785, |
| "learning_rate": 2.506702412868633e-05, |
| "loss": 0.3631, |
| "step": 1365 |
| }, |
| { |
| "epoch": 1.6477683956574185, |
| "grad_norm": 0.28633563148090796, |
| "learning_rate": 2.5044682752457553e-05, |
| "loss": 0.368, |
| "step": 1366 |
| }, |
| { |
| "epoch": 1.6489746682750301, |
| "grad_norm": 0.3607510749471786, |
| "learning_rate": 2.5022341376228774e-05, |
| "loss": 0.3544, |
| "step": 1367 |
| }, |
| { |
| "epoch": 1.6501809408926418, |
| "grad_norm": 0.3295890075011459, |
| "learning_rate": 2.5e-05, |
| "loss": 0.349, |
| "step": 1368 |
| }, |
| { |
| "epoch": 1.6513872135102532, |
| "grad_norm": 0.34302365121553463, |
| "learning_rate": 2.4977658623771225e-05, |
| "loss": 0.3565, |
| "step": 1369 |
| }, |
| { |
| "epoch": 1.652593486127865, |
| "grad_norm": 0.30784854721347316, |
| "learning_rate": 2.495531724754245e-05, |
| "loss": 0.3598, |
| "step": 1370 |
| }, |
| { |
| "epoch": 1.6537997587454765, |
| "grad_norm": 0.3580153689284883, |
| "learning_rate": 2.4932975871313673e-05, |
| "loss": 0.3552, |
| "step": 1371 |
| }, |
| { |
| "epoch": 1.655006031363088, |
| "grad_norm": 0.32893911340548043, |
| "learning_rate": 2.4910634495084898e-05, |
| "loss": 0.3796, |
| "step": 1372 |
| }, |
| { |
| "epoch": 1.6562123039806997, |
| "grad_norm": 0.34401166770895486, |
| "learning_rate": 2.4888293118856125e-05, |
| "loss": 0.3465, |
| "step": 1373 |
| }, |
| { |
| "epoch": 1.6574185765983112, |
| "grad_norm": 0.34733562136621865, |
| "learning_rate": 2.4865951742627346e-05, |
| "loss": 0.3728, |
| "step": 1374 |
| }, |
| { |
| "epoch": 1.6586248492159228, |
| "grad_norm": 0.3415210711237987, |
| "learning_rate": 2.484361036639857e-05, |
| "loss": 0.3738, |
| "step": 1375 |
| }, |
| { |
| "epoch": 1.6598311218335344, |
| "grad_norm": 0.3564407234532172, |
| "learning_rate": 2.4821268990169797e-05, |
| "loss": 0.3554, |
| "step": 1376 |
| }, |
| { |
| "epoch": 1.6610373944511458, |
| "grad_norm": 0.2733304045447679, |
| "learning_rate": 2.479892761394102e-05, |
| "loss": 0.3588, |
| "step": 1377 |
| }, |
| { |
| "epoch": 1.6622436670687577, |
| "grad_norm": 0.35404845473786883, |
| "learning_rate": 2.4776586237712242e-05, |
| "loss": 0.3456, |
| "step": 1378 |
| }, |
| { |
| "epoch": 1.6634499396863691, |
| "grad_norm": 0.26342823241932845, |
| "learning_rate": 2.475424486148347e-05, |
| "loss": 0.3445, |
| "step": 1379 |
| }, |
| { |
| "epoch": 1.6646562123039808, |
| "grad_norm": 0.3775927461538472, |
| "learning_rate": 2.4731903485254694e-05, |
| "loss": 0.3562, |
| "step": 1380 |
| }, |
| { |
| "epoch": 1.6658624849215924, |
| "grad_norm": 0.2903268980143278, |
| "learning_rate": 2.4709562109025918e-05, |
| "loss": 0.3657, |
| "step": 1381 |
| }, |
| { |
| "epoch": 1.6670687575392038, |
| "grad_norm": 0.31050102348781466, |
| "learning_rate": 2.4687220732797142e-05, |
| "loss": 0.3697, |
| "step": 1382 |
| }, |
| { |
| "epoch": 1.6682750301568154, |
| "grad_norm": 0.3099732969543504, |
| "learning_rate": 2.4664879356568366e-05, |
| "loss": 0.369, |
| "step": 1383 |
| }, |
| { |
| "epoch": 1.669481302774427, |
| "grad_norm": 0.30160506449369273, |
| "learning_rate": 2.464253798033959e-05, |
| "loss": 0.3599, |
| "step": 1384 |
| }, |
| { |
| "epoch": 1.6706875753920385, |
| "grad_norm": 0.3537825638200836, |
| "learning_rate": 2.4620196604110814e-05, |
| "loss": 0.3727, |
| "step": 1385 |
| }, |
| { |
| "epoch": 1.6718938480096501, |
| "grad_norm": 0.29825631027148963, |
| "learning_rate": 2.4597855227882038e-05, |
| "loss": 0.3608, |
| "step": 1386 |
| }, |
| { |
| "epoch": 1.6731001206272618, |
| "grad_norm": 0.3244351587542817, |
| "learning_rate": 2.4575513851653262e-05, |
| "loss": 0.3644, |
| "step": 1387 |
| }, |
| { |
| "epoch": 1.6743063932448732, |
| "grad_norm": 0.2692023706580779, |
| "learning_rate": 2.4553172475424486e-05, |
| "loss": 0.3862, |
| "step": 1388 |
| }, |
| { |
| "epoch": 1.675512665862485, |
| "grad_norm": 0.33882016385543945, |
| "learning_rate": 2.453083109919571e-05, |
| "loss": 0.3516, |
| "step": 1389 |
| }, |
| { |
| "epoch": 1.6767189384800965, |
| "grad_norm": 0.3351467173827849, |
| "learning_rate": 2.4508489722966935e-05, |
| "loss": 0.3576, |
| "step": 1390 |
| }, |
| { |
| "epoch": 1.677925211097708, |
| "grad_norm": 0.29184859188818785, |
| "learning_rate": 2.4486148346738162e-05, |
| "loss": 0.349, |
| "step": 1391 |
| }, |
| { |
| "epoch": 1.6791314837153197, |
| "grad_norm": 0.4019244815049262, |
| "learning_rate": 2.4463806970509383e-05, |
| "loss": 0.3743, |
| "step": 1392 |
| }, |
| { |
| "epoch": 1.6803377563329311, |
| "grad_norm": 0.3100696829967641, |
| "learning_rate": 2.4441465594280607e-05, |
| "loss": 0.3493, |
| "step": 1393 |
| }, |
| { |
| "epoch": 1.6815440289505428, |
| "grad_norm": 0.37990510032559094, |
| "learning_rate": 2.4419124218051834e-05, |
| "loss": 0.3404, |
| "step": 1394 |
| }, |
| { |
| "epoch": 1.6827503015681544, |
| "grad_norm": 0.28147957637419957, |
| "learning_rate": 2.439678284182306e-05, |
| "loss": 0.3683, |
| "step": 1395 |
| }, |
| { |
| "epoch": 1.6839565741857658, |
| "grad_norm": 0.2944742115661062, |
| "learning_rate": 2.437444146559428e-05, |
| "loss": 0.3448, |
| "step": 1396 |
| }, |
| { |
| "epoch": 1.6851628468033777, |
| "grad_norm": 0.29809478369119624, |
| "learning_rate": 2.4352100089365507e-05, |
| "loss": 0.3471, |
| "step": 1397 |
| }, |
| { |
| "epoch": 1.686369119420989, |
| "grad_norm": 0.26699037994675995, |
| "learning_rate": 2.432975871313673e-05, |
| "loss": 0.3473, |
| "step": 1398 |
| }, |
| { |
| "epoch": 1.6875753920386007, |
| "grad_norm": 0.33536542802489927, |
| "learning_rate": 2.4307417336907955e-05, |
| "loss": 0.3771, |
| "step": 1399 |
| }, |
| { |
| "epoch": 1.6887816646562124, |
| "grad_norm": 0.300867897752028, |
| "learning_rate": 2.428507596067918e-05, |
| "loss": 0.3517, |
| "step": 1400 |
| }, |
| { |
| "epoch": 1.6899879372738238, |
| "grad_norm": 0.32504296903202723, |
| "learning_rate": 2.4262734584450403e-05, |
| "loss": 0.3487, |
| "step": 1401 |
| }, |
| { |
| "epoch": 1.6911942098914354, |
| "grad_norm": 0.276555968975667, |
| "learning_rate": 2.424039320822163e-05, |
| "loss": 0.3643, |
| "step": 1402 |
| }, |
| { |
| "epoch": 1.692400482509047, |
| "grad_norm": 0.3017611551074008, |
| "learning_rate": 2.421805183199285e-05, |
| "loss": 0.365, |
| "step": 1403 |
| }, |
| { |
| "epoch": 1.6936067551266585, |
| "grad_norm": 0.2672773162267323, |
| "learning_rate": 2.4195710455764075e-05, |
| "loss": 0.3522, |
| "step": 1404 |
| }, |
| { |
| "epoch": 1.6948130277442703, |
| "grad_norm": 0.31404716787538994, |
| "learning_rate": 2.4173369079535303e-05, |
| "loss": 0.3486, |
| "step": 1405 |
| }, |
| { |
| "epoch": 1.6960193003618818, |
| "grad_norm": 0.29569604012112444, |
| "learning_rate": 2.4151027703306527e-05, |
| "loss": 0.3664, |
| "step": 1406 |
| }, |
| { |
| "epoch": 1.6972255729794934, |
| "grad_norm": 0.27535191548620136, |
| "learning_rate": 2.4128686327077747e-05, |
| "loss": 0.3702, |
| "step": 1407 |
| }, |
| { |
| "epoch": 1.698431845597105, |
| "grad_norm": 0.260506311606101, |
| "learning_rate": 2.4106344950848975e-05, |
| "loss": 0.3678, |
| "step": 1408 |
| }, |
| { |
| "epoch": 1.6996381182147164, |
| "grad_norm": 0.30167357587290894, |
| "learning_rate": 2.40840035746202e-05, |
| "loss": 0.3625, |
| "step": 1409 |
| }, |
| { |
| "epoch": 1.700844390832328, |
| "grad_norm": 0.29521679266675266, |
| "learning_rate": 2.4061662198391423e-05, |
| "loss": 0.3501, |
| "step": 1410 |
| }, |
| { |
| "epoch": 1.7020506634499397, |
| "grad_norm": 0.31021002864555397, |
| "learning_rate": 2.4039320822162647e-05, |
| "loss": 0.3836, |
| "step": 1411 |
| }, |
| { |
| "epoch": 1.7032569360675511, |
| "grad_norm": 0.282035427062887, |
| "learning_rate": 2.401697944593387e-05, |
| "loss": 0.3655, |
| "step": 1412 |
| }, |
| { |
| "epoch": 1.704463208685163, |
| "grad_norm": 0.33351450606256217, |
| "learning_rate": 2.3994638069705095e-05, |
| "loss": 0.3887, |
| "step": 1413 |
| }, |
| { |
| "epoch": 1.7056694813027744, |
| "grad_norm": 0.283269591150281, |
| "learning_rate": 2.397229669347632e-05, |
| "loss": 0.3631, |
| "step": 1414 |
| }, |
| { |
| "epoch": 1.706875753920386, |
| "grad_norm": 0.2946135788809987, |
| "learning_rate": 2.3949955317247544e-05, |
| "loss": 0.3646, |
| "step": 1415 |
| }, |
| { |
| "epoch": 1.7080820265379977, |
| "grad_norm": 0.31742421947897204, |
| "learning_rate": 2.3927613941018768e-05, |
| "loss": 0.3691, |
| "step": 1416 |
| }, |
| { |
| "epoch": 1.709288299155609, |
| "grad_norm": 0.33683394127880345, |
| "learning_rate": 2.390527256478999e-05, |
| "loss": 0.3524, |
| "step": 1417 |
| }, |
| { |
| "epoch": 1.7104945717732207, |
| "grad_norm": 0.2731456928477606, |
| "learning_rate": 2.3882931188561216e-05, |
| "loss": 0.3401, |
| "step": 1418 |
| }, |
| { |
| "epoch": 1.7117008443908324, |
| "grad_norm": 0.3471290083601201, |
| "learning_rate": 2.386058981233244e-05, |
| "loss": 0.3553, |
| "step": 1419 |
| }, |
| { |
| "epoch": 1.7129071170084438, |
| "grad_norm": 0.3528869496369736, |
| "learning_rate": 2.3838248436103667e-05, |
| "loss": 0.324, |
| "step": 1420 |
| }, |
| { |
| "epoch": 1.7141133896260556, |
| "grad_norm": 0.2925982551552773, |
| "learning_rate": 2.3815907059874888e-05, |
| "loss": 0.3751, |
| "step": 1421 |
| }, |
| { |
| "epoch": 1.715319662243667, |
| "grad_norm": 0.36851576055596835, |
| "learning_rate": 2.3793565683646112e-05, |
| "loss": 0.3601, |
| "step": 1422 |
| }, |
| { |
| "epoch": 1.7165259348612787, |
| "grad_norm": 0.30235285015745955, |
| "learning_rate": 2.377122430741734e-05, |
| "loss": 0.3464, |
| "step": 1423 |
| }, |
| { |
| "epoch": 1.7177322074788903, |
| "grad_norm": 0.29874368477246893, |
| "learning_rate": 2.3748882931188564e-05, |
| "loss": 0.3626, |
| "step": 1424 |
| }, |
| { |
| "epoch": 1.7189384800965017, |
| "grad_norm": 0.3260772749237896, |
| "learning_rate": 2.3726541554959784e-05, |
| "loss": 0.3763, |
| "step": 1425 |
| }, |
| { |
| "epoch": 1.7201447527141134, |
| "grad_norm": 0.3341146497077902, |
| "learning_rate": 2.3704200178731012e-05, |
| "loss": 0.3521, |
| "step": 1426 |
| }, |
| { |
| "epoch": 1.721351025331725, |
| "grad_norm": 0.28157924981156457, |
| "learning_rate": 2.3681858802502236e-05, |
| "loss": 0.3622, |
| "step": 1427 |
| }, |
| { |
| "epoch": 1.7225572979493364, |
| "grad_norm": 0.3739702781502209, |
| "learning_rate": 2.365951742627346e-05, |
| "loss": 0.4076, |
| "step": 1428 |
| }, |
| { |
| "epoch": 1.7237635705669483, |
| "grad_norm": 0.3665408279544029, |
| "learning_rate": 2.3637176050044684e-05, |
| "loss": 0.3794, |
| "step": 1429 |
| }, |
| { |
| "epoch": 1.7249698431845597, |
| "grad_norm": 0.2935452940038669, |
| "learning_rate": 2.3614834673815908e-05, |
| "loss": 0.372, |
| "step": 1430 |
| }, |
| { |
| "epoch": 1.7261761158021713, |
| "grad_norm": 0.3244721429577489, |
| "learning_rate": 2.3592493297587132e-05, |
| "loss": 0.3609, |
| "step": 1431 |
| }, |
| { |
| "epoch": 1.727382388419783, |
| "grad_norm": 0.2918770790325217, |
| "learning_rate": 2.3570151921358356e-05, |
| "loss": 0.3673, |
| "step": 1432 |
| }, |
| { |
| "epoch": 1.7285886610373944, |
| "grad_norm": 0.2992819875803547, |
| "learning_rate": 2.354781054512958e-05, |
| "loss": 0.3571, |
| "step": 1433 |
| }, |
| { |
| "epoch": 1.729794933655006, |
| "grad_norm": 0.29707565974563616, |
| "learning_rate": 2.3525469168900805e-05, |
| "loss": 0.3901, |
| "step": 1434 |
| }, |
| { |
| "epoch": 1.7310012062726177, |
| "grad_norm": 0.32560925039212163, |
| "learning_rate": 2.3503127792672032e-05, |
| "loss": 0.3416, |
| "step": 1435 |
| }, |
| { |
| "epoch": 1.732207478890229, |
| "grad_norm": 0.2690827871626142, |
| "learning_rate": 2.3480786416443253e-05, |
| "loss": 0.3683, |
| "step": 1436 |
| }, |
| { |
| "epoch": 1.7334137515078407, |
| "grad_norm": 0.3060993415094302, |
| "learning_rate": 2.3458445040214477e-05, |
| "loss": 0.3587, |
| "step": 1437 |
| }, |
| { |
| "epoch": 1.7346200241254524, |
| "grad_norm": 0.2505412183050965, |
| "learning_rate": 2.3436103663985704e-05, |
| "loss": 0.3674, |
| "step": 1438 |
| }, |
| { |
| "epoch": 1.7358262967430638, |
| "grad_norm": 0.2653457404524692, |
| "learning_rate": 2.341376228775693e-05, |
| "loss": 0.3576, |
| "step": 1439 |
| }, |
| { |
| "epoch": 1.7370325693606756, |
| "grad_norm": 0.2771853156246843, |
| "learning_rate": 2.339142091152815e-05, |
| "loss": 0.3812, |
| "step": 1440 |
| }, |
| { |
| "epoch": 1.738238841978287, |
| "grad_norm": 0.2596381263418958, |
| "learning_rate": 2.3369079535299377e-05, |
| "loss": 0.359, |
| "step": 1441 |
| }, |
| { |
| "epoch": 1.7394451145958987, |
| "grad_norm": 0.28891917561566355, |
| "learning_rate": 2.33467381590706e-05, |
| "loss": 0.3586, |
| "step": 1442 |
| }, |
| { |
| "epoch": 1.7406513872135103, |
| "grad_norm": 0.27706301721058363, |
| "learning_rate": 2.332439678284182e-05, |
| "loss": 0.3461, |
| "step": 1443 |
| }, |
| { |
| "epoch": 1.7418576598311217, |
| "grad_norm": 0.29260701520716376, |
| "learning_rate": 2.330205540661305e-05, |
| "loss": 0.3525, |
| "step": 1444 |
| }, |
| { |
| "epoch": 1.7430639324487334, |
| "grad_norm": 0.26347051526297266, |
| "learning_rate": 2.3279714030384273e-05, |
| "loss": 0.3432, |
| "step": 1445 |
| }, |
| { |
| "epoch": 1.744270205066345, |
| "grad_norm": 0.28261285996650526, |
| "learning_rate": 2.3257372654155497e-05, |
| "loss": 0.3542, |
| "step": 1446 |
| }, |
| { |
| "epoch": 1.7454764776839564, |
| "grad_norm": 0.2802265796653411, |
| "learning_rate": 2.323503127792672e-05, |
| "loss": 0.3862, |
| "step": 1447 |
| }, |
| { |
| "epoch": 1.7466827503015683, |
| "grad_norm": 0.2602101357811866, |
| "learning_rate": 2.3212689901697945e-05, |
| "loss": 0.3601, |
| "step": 1448 |
| }, |
| { |
| "epoch": 1.7478890229191797, |
| "grad_norm": 0.2512605438487134, |
| "learning_rate": 2.319034852546917e-05, |
| "loss": 0.3591, |
| "step": 1449 |
| }, |
| { |
| "epoch": 1.7490952955367913, |
| "grad_norm": 0.24761708762692664, |
| "learning_rate": 2.3168007149240393e-05, |
| "loss": 0.3555, |
| "step": 1450 |
| }, |
| { |
| "epoch": 1.750301568154403, |
| "grad_norm": 0.3120056641615667, |
| "learning_rate": 2.3145665773011617e-05, |
| "loss": 0.3604, |
| "step": 1451 |
| }, |
| { |
| "epoch": 1.7515078407720144, |
| "grad_norm": 0.28674400953294193, |
| "learning_rate": 2.312332439678284e-05, |
| "loss": 0.3379, |
| "step": 1452 |
| }, |
| { |
| "epoch": 1.752714113389626, |
| "grad_norm": 0.25622102965400334, |
| "learning_rate": 2.310098302055407e-05, |
| "loss": 0.3593, |
| "step": 1453 |
| }, |
| { |
| "epoch": 1.7539203860072377, |
| "grad_norm": 0.35149468504184234, |
| "learning_rate": 2.307864164432529e-05, |
| "loss": 0.3641, |
| "step": 1454 |
| }, |
| { |
| "epoch": 1.755126658624849, |
| "grad_norm": 0.2850804434851281, |
| "learning_rate": 2.3056300268096514e-05, |
| "loss": 0.354, |
| "step": 1455 |
| }, |
| { |
| "epoch": 1.756332931242461, |
| "grad_norm": 0.3256278208536396, |
| "learning_rate": 2.303395889186774e-05, |
| "loss": 0.3588, |
| "step": 1456 |
| }, |
| { |
| "epoch": 1.7575392038600723, |
| "grad_norm": 0.39924915699406693, |
| "learning_rate": 2.3011617515638965e-05, |
| "loss": 0.3772, |
| "step": 1457 |
| }, |
| { |
| "epoch": 1.758745476477684, |
| "grad_norm": 0.3071970298535891, |
| "learning_rate": 2.2989276139410186e-05, |
| "loss": 0.3565, |
| "step": 1458 |
| }, |
| { |
| "epoch": 1.7599517490952956, |
| "grad_norm": 0.33320432329737215, |
| "learning_rate": 2.2966934763181414e-05, |
| "loss": 0.3496, |
| "step": 1459 |
| }, |
| { |
| "epoch": 1.761158021712907, |
| "grad_norm": 0.31284583547199185, |
| "learning_rate": 2.2944593386952638e-05, |
| "loss": 0.3627, |
| "step": 1460 |
| }, |
| { |
| "epoch": 1.7623642943305187, |
| "grad_norm": 0.3720962834352751, |
| "learning_rate": 2.2922252010723862e-05, |
| "loss": 0.3764, |
| "step": 1461 |
| }, |
| { |
| "epoch": 1.7635705669481303, |
| "grad_norm": 0.28507295604604466, |
| "learning_rate": 2.2899910634495086e-05, |
| "loss": 0.3605, |
| "step": 1462 |
| }, |
| { |
| "epoch": 1.7647768395657417, |
| "grad_norm": 0.35564273103301347, |
| "learning_rate": 2.287756925826631e-05, |
| "loss": 0.3675, |
| "step": 1463 |
| }, |
| { |
| "epoch": 1.7659831121833536, |
| "grad_norm": 0.28933965320173666, |
| "learning_rate": 2.2855227882037537e-05, |
| "loss": 0.3406, |
| "step": 1464 |
| }, |
| { |
| "epoch": 1.767189384800965, |
| "grad_norm": 0.3116459317914364, |
| "learning_rate": 2.2832886505808758e-05, |
| "loss": 0.355, |
| "step": 1465 |
| }, |
| { |
| "epoch": 1.7683956574185766, |
| "grad_norm": 0.27245033306948696, |
| "learning_rate": 2.2810545129579982e-05, |
| "loss": 0.3462, |
| "step": 1466 |
| }, |
| { |
| "epoch": 1.7696019300361883, |
| "grad_norm": 0.2869215998169608, |
| "learning_rate": 2.278820375335121e-05, |
| "loss": 0.3733, |
| "step": 1467 |
| }, |
| { |
| "epoch": 1.7708082026537997, |
| "grad_norm": 0.3113608243209837, |
| "learning_rate": 2.2765862377122434e-05, |
| "loss": 0.3506, |
| "step": 1468 |
| }, |
| { |
| "epoch": 1.7720144752714113, |
| "grad_norm": 0.3207842949128961, |
| "learning_rate": 2.2743521000893654e-05, |
| "loss": 0.3631, |
| "step": 1469 |
| }, |
| { |
| "epoch": 1.773220747889023, |
| "grad_norm": 0.29624424369525815, |
| "learning_rate": 2.2721179624664882e-05, |
| "loss": 0.3649, |
| "step": 1470 |
| }, |
| { |
| "epoch": 1.7744270205066344, |
| "grad_norm": 0.3303763513390022, |
| "learning_rate": 2.2698838248436106e-05, |
| "loss": 0.3633, |
| "step": 1471 |
| }, |
| { |
| "epoch": 1.7756332931242462, |
| "grad_norm": 0.3227006129000047, |
| "learning_rate": 2.267649687220733e-05, |
| "loss": 0.3556, |
| "step": 1472 |
| }, |
| { |
| "epoch": 1.7768395657418576, |
| "grad_norm": 0.3393310341911691, |
| "learning_rate": 2.2654155495978554e-05, |
| "loss": 0.3631, |
| "step": 1473 |
| }, |
| { |
| "epoch": 1.7780458383594693, |
| "grad_norm": 0.3529552361183484, |
| "learning_rate": 2.2631814119749778e-05, |
| "loss": 0.3546, |
| "step": 1474 |
| }, |
| { |
| "epoch": 1.779252110977081, |
| "grad_norm": 0.31525744469559364, |
| "learning_rate": 2.2609472743521002e-05, |
| "loss": 0.343, |
| "step": 1475 |
| }, |
| { |
| "epoch": 1.7804583835946923, |
| "grad_norm": 0.3163718158924185, |
| "learning_rate": 2.2587131367292226e-05, |
| "loss": 0.3692, |
| "step": 1476 |
| }, |
| { |
| "epoch": 1.781664656212304, |
| "grad_norm": 0.32561762908782643, |
| "learning_rate": 2.256478999106345e-05, |
| "loss": 0.364, |
| "step": 1477 |
| }, |
| { |
| "epoch": 1.7828709288299156, |
| "grad_norm": 0.27131184421929944, |
| "learning_rate": 2.2542448614834675e-05, |
| "loss": 0.3518, |
| "step": 1478 |
| }, |
| { |
| "epoch": 1.784077201447527, |
| "grad_norm": 0.2811976684931462, |
| "learning_rate": 2.25201072386059e-05, |
| "loss": 0.3497, |
| "step": 1479 |
| }, |
| { |
| "epoch": 1.7852834740651389, |
| "grad_norm": 0.27610607298870293, |
| "learning_rate": 2.2497765862377123e-05, |
| "loss": 0.3659, |
| "step": 1480 |
| }, |
| { |
| "epoch": 1.7864897466827503, |
| "grad_norm": 0.3164778858830116, |
| "learning_rate": 2.2475424486148347e-05, |
| "loss": 0.3393, |
| "step": 1481 |
| }, |
| { |
| "epoch": 1.787696019300362, |
| "grad_norm": 0.3440205777362051, |
| "learning_rate": 2.2453083109919574e-05, |
| "loss": 0.3766, |
| "step": 1482 |
| }, |
| { |
| "epoch": 1.7889022919179736, |
| "grad_norm": 0.29128877422979677, |
| "learning_rate": 2.2430741733690795e-05, |
| "loss": 0.346, |
| "step": 1483 |
| }, |
| { |
| "epoch": 1.790108564535585, |
| "grad_norm": 0.2997901031965129, |
| "learning_rate": 2.240840035746202e-05, |
| "loss": 0.3439, |
| "step": 1484 |
| }, |
| { |
| "epoch": 1.7913148371531966, |
| "grad_norm": 0.31411546633151705, |
| "learning_rate": 2.2386058981233247e-05, |
| "loss": 0.3642, |
| "step": 1485 |
| }, |
| { |
| "epoch": 1.7925211097708083, |
| "grad_norm": 0.2954643791110668, |
| "learning_rate": 2.236371760500447e-05, |
| "loss": 0.3816, |
| "step": 1486 |
| }, |
| { |
| "epoch": 1.7937273823884197, |
| "grad_norm": 0.3080224579128579, |
| "learning_rate": 2.234137622877569e-05, |
| "loss": 0.3842, |
| "step": 1487 |
| }, |
| { |
| "epoch": 1.7949336550060315, |
| "grad_norm": 0.25606441087244014, |
| "learning_rate": 2.231903485254692e-05, |
| "loss": 0.3758, |
| "step": 1488 |
| }, |
| { |
| "epoch": 1.796139927623643, |
| "grad_norm": 0.3150542138350638, |
| "learning_rate": 2.2296693476318143e-05, |
| "loss": 0.3692, |
| "step": 1489 |
| }, |
| { |
| "epoch": 1.7973462002412546, |
| "grad_norm": 0.26939799555931804, |
| "learning_rate": 2.2274352100089367e-05, |
| "loss": 0.3304, |
| "step": 1490 |
| }, |
| { |
| "epoch": 1.7985524728588662, |
| "grad_norm": 0.2880747492641311, |
| "learning_rate": 2.225201072386059e-05, |
| "loss": 0.3635, |
| "step": 1491 |
| }, |
| { |
| "epoch": 1.7997587454764776, |
| "grad_norm": 0.27399275585186184, |
| "learning_rate": 2.2229669347631815e-05, |
| "loss": 0.3713, |
| "step": 1492 |
| }, |
| { |
| "epoch": 1.8009650180940893, |
| "grad_norm": 0.2457588382002816, |
| "learning_rate": 2.220732797140304e-05, |
| "loss": 0.3594, |
| "step": 1493 |
| }, |
| { |
| "epoch": 1.802171290711701, |
| "grad_norm": 0.2821381653581902, |
| "learning_rate": 2.2184986595174263e-05, |
| "loss": 0.3579, |
| "step": 1494 |
| }, |
| { |
| "epoch": 1.8033775633293123, |
| "grad_norm": 0.26906678345859164, |
| "learning_rate": 2.2162645218945488e-05, |
| "loss": 0.3704, |
| "step": 1495 |
| }, |
| { |
| "epoch": 1.804583835946924, |
| "grad_norm": 0.28860333792004156, |
| "learning_rate": 2.214030384271671e-05, |
| "loss": 0.3828, |
| "step": 1496 |
| }, |
| { |
| "epoch": 1.8057901085645356, |
| "grad_norm": 0.2941151374887319, |
| "learning_rate": 2.211796246648794e-05, |
| "loss": 0.3559, |
| "step": 1497 |
| }, |
| { |
| "epoch": 1.806996381182147, |
| "grad_norm": 0.33755789851920004, |
| "learning_rate": 2.209562109025916e-05, |
| "loss": 0.3444, |
| "step": 1498 |
| }, |
| { |
| "epoch": 1.8082026537997589, |
| "grad_norm": 0.3153818457382501, |
| "learning_rate": 2.2073279714030384e-05, |
| "loss": 0.3315, |
| "step": 1499 |
| }, |
| { |
| "epoch": 1.8094089264173703, |
| "grad_norm": 0.27122267879244044, |
| "learning_rate": 2.205093833780161e-05, |
| "loss": 0.3321, |
| "step": 1500 |
| }, |
| { |
| "epoch": 1.810615199034982, |
| "grad_norm": 0.36408451541394626, |
| "learning_rate": 2.2028596961572835e-05, |
| "loss": 0.3577, |
| "step": 1501 |
| }, |
| { |
| "epoch": 1.8118214716525936, |
| "grad_norm": 0.3048366478040056, |
| "learning_rate": 2.2006255585344056e-05, |
| "loss": 0.363, |
| "step": 1502 |
| }, |
| { |
| "epoch": 1.813027744270205, |
| "grad_norm": 0.289384365836586, |
| "learning_rate": 2.1983914209115284e-05, |
| "loss": 0.3476, |
| "step": 1503 |
| }, |
| { |
| "epoch": 1.8142340168878166, |
| "grad_norm": 0.3119269637358593, |
| "learning_rate": 2.1961572832886508e-05, |
| "loss": 0.3406, |
| "step": 1504 |
| }, |
| { |
| "epoch": 1.8154402895054282, |
| "grad_norm": 0.3357944937521883, |
| "learning_rate": 2.193923145665773e-05, |
| "loss": 0.3678, |
| "step": 1505 |
| }, |
| { |
| "epoch": 1.8166465621230397, |
| "grad_norm": 0.3277816414905101, |
| "learning_rate": 2.1916890080428956e-05, |
| "loss": 0.3711, |
| "step": 1506 |
| }, |
| { |
| "epoch": 1.8178528347406515, |
| "grad_norm": 0.29928477659654407, |
| "learning_rate": 2.189454870420018e-05, |
| "loss": 0.3408, |
| "step": 1507 |
| }, |
| { |
| "epoch": 1.819059107358263, |
| "grad_norm": 0.29004649198090976, |
| "learning_rate": 2.1872207327971404e-05, |
| "loss": 0.3461, |
| "step": 1508 |
| }, |
| { |
| "epoch": 1.8202653799758746, |
| "grad_norm": 0.25481916180153474, |
| "learning_rate": 2.1849865951742628e-05, |
| "loss": 0.3554, |
| "step": 1509 |
| }, |
| { |
| "epoch": 1.8214716525934862, |
| "grad_norm": 0.3206381774906527, |
| "learning_rate": 2.1827524575513852e-05, |
| "loss": 0.3654, |
| "step": 1510 |
| }, |
| { |
| "epoch": 1.8226779252110976, |
| "grad_norm": 0.27464477113928465, |
| "learning_rate": 2.1805183199285076e-05, |
| "loss": 0.3686, |
| "step": 1511 |
| }, |
| { |
| "epoch": 1.8238841978287093, |
| "grad_norm": 0.2978283923505883, |
| "learning_rate": 2.17828418230563e-05, |
| "loss": 0.3809, |
| "step": 1512 |
| }, |
| { |
| "epoch": 1.825090470446321, |
| "grad_norm": 0.3018397915218147, |
| "learning_rate": 2.1760500446827525e-05, |
| "loss": 0.3604, |
| "step": 1513 |
| }, |
| { |
| "epoch": 1.8262967430639323, |
| "grad_norm": 0.32003093006503486, |
| "learning_rate": 2.173815907059875e-05, |
| "loss": 0.3503, |
| "step": 1514 |
| }, |
| { |
| "epoch": 1.8275030156815442, |
| "grad_norm": 0.26271586653183376, |
| "learning_rate": 2.1715817694369976e-05, |
| "loss": 0.3615, |
| "step": 1515 |
| }, |
| { |
| "epoch": 1.8287092882991556, |
| "grad_norm": 0.34758559596839306, |
| "learning_rate": 2.1693476318141197e-05, |
| "loss": 0.3679, |
| "step": 1516 |
| }, |
| { |
| "epoch": 1.8299155609167672, |
| "grad_norm": 0.28304707959172154, |
| "learning_rate": 2.167113494191242e-05, |
| "loss": 0.3559, |
| "step": 1517 |
| }, |
| { |
| "epoch": 1.8311218335343789, |
| "grad_norm": 0.30716182879162085, |
| "learning_rate": 2.164879356568365e-05, |
| "loss": 0.3705, |
| "step": 1518 |
| }, |
| { |
| "epoch": 1.8323281061519903, |
| "grad_norm": 0.3272103551415873, |
| "learning_rate": 2.1626452189454872e-05, |
| "loss": 0.3557, |
| "step": 1519 |
| }, |
| { |
| "epoch": 1.833534378769602, |
| "grad_norm": 0.2856664835977563, |
| "learning_rate": 2.1604110813226093e-05, |
| "loss": 0.354, |
| "step": 1520 |
| }, |
| { |
| "epoch": 1.8347406513872135, |
| "grad_norm": 0.2995470532032037, |
| "learning_rate": 2.158176943699732e-05, |
| "loss": 0.3542, |
| "step": 1521 |
| }, |
| { |
| "epoch": 1.835946924004825, |
| "grad_norm": 0.3198503981887153, |
| "learning_rate": 2.1559428060768545e-05, |
| "loss": 0.3645, |
| "step": 1522 |
| }, |
| { |
| "epoch": 1.8371531966224368, |
| "grad_norm": 0.32754560915720893, |
| "learning_rate": 2.153708668453977e-05, |
| "loss": 0.3551, |
| "step": 1523 |
| }, |
| { |
| "epoch": 1.8383594692400482, |
| "grad_norm": 0.26948473809778084, |
| "learning_rate": 2.1514745308310993e-05, |
| "loss": 0.3604, |
| "step": 1524 |
| }, |
| { |
| "epoch": 1.8395657418576599, |
| "grad_norm": 0.2926288255208596, |
| "learning_rate": 2.1492403932082217e-05, |
| "loss": 0.3455, |
| "step": 1525 |
| }, |
| { |
| "epoch": 1.8407720144752715, |
| "grad_norm": 0.34964762071460354, |
| "learning_rate": 2.1470062555853444e-05, |
| "loss": 0.3513, |
| "step": 1526 |
| }, |
| { |
| "epoch": 1.841978287092883, |
| "grad_norm": 0.2592691589602463, |
| "learning_rate": 2.1447721179624665e-05, |
| "loss": 0.3591, |
| "step": 1527 |
| }, |
| { |
| "epoch": 1.8431845597104946, |
| "grad_norm": 0.309967263730365, |
| "learning_rate": 2.142537980339589e-05, |
| "loss": 0.3876, |
| "step": 1528 |
| }, |
| { |
| "epoch": 1.8443908323281062, |
| "grad_norm": 0.28029506099906043, |
| "learning_rate": 2.1403038427167117e-05, |
| "loss": 0.3583, |
| "step": 1529 |
| }, |
| { |
| "epoch": 1.8455971049457176, |
| "grad_norm": 0.26805452677117375, |
| "learning_rate": 2.138069705093834e-05, |
| "loss": 0.3466, |
| "step": 1530 |
| }, |
| { |
| "epoch": 1.8468033775633295, |
| "grad_norm": 0.2994299990919435, |
| "learning_rate": 2.135835567470956e-05, |
| "loss": 0.356, |
| "step": 1531 |
| }, |
| { |
| "epoch": 1.8480096501809409, |
| "grad_norm": 0.2731266768809756, |
| "learning_rate": 2.133601429848079e-05, |
| "loss": 0.363, |
| "step": 1532 |
| }, |
| { |
| "epoch": 1.8492159227985525, |
| "grad_norm": 0.32771389667540246, |
| "learning_rate": 2.1313672922252013e-05, |
| "loss": 0.3568, |
| "step": 1533 |
| }, |
| { |
| "epoch": 1.8504221954161642, |
| "grad_norm": 0.3573609136221111, |
| "learning_rate": 2.1291331546023234e-05, |
| "loss": 0.3957, |
| "step": 1534 |
| }, |
| { |
| "epoch": 1.8516284680337756, |
| "grad_norm": 0.3162680716287011, |
| "learning_rate": 2.126899016979446e-05, |
| "loss": 0.3535, |
| "step": 1535 |
| }, |
| { |
| "epoch": 1.8528347406513872, |
| "grad_norm": 0.295455790097431, |
| "learning_rate": 2.1246648793565685e-05, |
| "loss": 0.3545, |
| "step": 1536 |
| }, |
| { |
| "epoch": 1.8540410132689988, |
| "grad_norm": 0.26045858915660014, |
| "learning_rate": 2.122430741733691e-05, |
| "loss": 0.3456, |
| "step": 1537 |
| }, |
| { |
| "epoch": 1.8552472858866103, |
| "grad_norm": 0.31789979304930077, |
| "learning_rate": 2.1201966041108133e-05, |
| "loss": 0.3811, |
| "step": 1538 |
| }, |
| { |
| "epoch": 1.8564535585042221, |
| "grad_norm": 0.2458230363735759, |
| "learning_rate": 2.1179624664879358e-05, |
| "loss": 0.3498, |
| "step": 1539 |
| }, |
| { |
| "epoch": 1.8576598311218335, |
| "grad_norm": 0.31583664687268614, |
| "learning_rate": 2.115728328865058e-05, |
| "loss": 0.3609, |
| "step": 1540 |
| }, |
| { |
| "epoch": 1.8588661037394452, |
| "grad_norm": 0.2777923125909325, |
| "learning_rate": 2.1134941912421806e-05, |
| "loss": 0.3599, |
| "step": 1541 |
| }, |
| { |
| "epoch": 1.8600723763570568, |
| "grad_norm": 0.2996705803905586, |
| "learning_rate": 2.111260053619303e-05, |
| "loss": 0.3542, |
| "step": 1542 |
| }, |
| { |
| "epoch": 1.8612786489746682, |
| "grad_norm": 0.23278188821193466, |
| "learning_rate": 2.1090259159964254e-05, |
| "loss": 0.3408, |
| "step": 1543 |
| }, |
| { |
| "epoch": 1.8624849215922799, |
| "grad_norm": 0.32721942024267, |
| "learning_rate": 2.106791778373548e-05, |
| "loss": 0.3465, |
| "step": 1544 |
| }, |
| { |
| "epoch": 1.8636911942098915, |
| "grad_norm": 0.29620060616140187, |
| "learning_rate": 2.1045576407506702e-05, |
| "loss": 0.3483, |
| "step": 1545 |
| }, |
| { |
| "epoch": 1.864897466827503, |
| "grad_norm": 0.2833126537587724, |
| "learning_rate": 2.1023235031277926e-05, |
| "loss": 0.363, |
| "step": 1546 |
| }, |
| { |
| "epoch": 1.8661037394451148, |
| "grad_norm": 0.27966605358297153, |
| "learning_rate": 2.1000893655049154e-05, |
| "loss": 0.383, |
| "step": 1547 |
| }, |
| { |
| "epoch": 1.8673100120627262, |
| "grad_norm": 0.2953507697070746, |
| "learning_rate": 2.0978552278820378e-05, |
| "loss": 0.352, |
| "step": 1548 |
| }, |
| { |
| "epoch": 1.8685162846803376, |
| "grad_norm": 0.3118445272043699, |
| "learning_rate": 2.09562109025916e-05, |
| "loss": 0.361, |
| "step": 1549 |
| }, |
| { |
| "epoch": 1.8697225572979495, |
| "grad_norm": 0.27569695544577993, |
| "learning_rate": 2.0933869526362826e-05, |
| "loss": 0.3705, |
| "step": 1550 |
| }, |
| { |
| "epoch": 1.8709288299155609, |
| "grad_norm": 0.2931446668444685, |
| "learning_rate": 2.091152815013405e-05, |
| "loss": 0.3517, |
| "step": 1551 |
| }, |
| { |
| "epoch": 1.8721351025331725, |
| "grad_norm": 0.28741574092692435, |
| "learning_rate": 2.0889186773905274e-05, |
| "loss": 0.3482, |
| "step": 1552 |
| }, |
| { |
| "epoch": 1.8733413751507841, |
| "grad_norm": 0.33952287545856796, |
| "learning_rate": 2.0866845397676498e-05, |
| "loss": 0.3646, |
| "step": 1553 |
| }, |
| { |
| "epoch": 1.8745476477683956, |
| "grad_norm": 0.25889542403249843, |
| "learning_rate": 2.0844504021447722e-05, |
| "loss": 0.3625, |
| "step": 1554 |
| }, |
| { |
| "epoch": 1.8757539203860072, |
| "grad_norm": 0.2779529963292999, |
| "learning_rate": 2.0822162645218946e-05, |
| "loss": 0.3483, |
| "step": 1555 |
| }, |
| { |
| "epoch": 1.8769601930036188, |
| "grad_norm": 0.27128966596000303, |
| "learning_rate": 2.079982126899017e-05, |
| "loss": 0.352, |
| "step": 1556 |
| }, |
| { |
| "epoch": 1.8781664656212302, |
| "grad_norm": 0.2902910048214581, |
| "learning_rate": 2.0777479892761395e-05, |
| "loss": 0.3723, |
| "step": 1557 |
| }, |
| { |
| "epoch": 1.879372738238842, |
| "grad_norm": 0.27973862245194236, |
| "learning_rate": 2.075513851653262e-05, |
| "loss": 0.3743, |
| "step": 1558 |
| }, |
| { |
| "epoch": 1.8805790108564535, |
| "grad_norm": 0.30609968166421897, |
| "learning_rate": 2.0732797140303846e-05, |
| "loss": 0.3731, |
| "step": 1559 |
| }, |
| { |
| "epoch": 1.8817852834740652, |
| "grad_norm": 0.2728618671983489, |
| "learning_rate": 2.0710455764075067e-05, |
| "loss": 0.3617, |
| "step": 1560 |
| }, |
| { |
| "epoch": 1.8829915560916768, |
| "grad_norm": 0.2543254562059058, |
| "learning_rate": 2.068811438784629e-05, |
| "loss": 0.3436, |
| "step": 1561 |
| }, |
| { |
| "epoch": 1.8841978287092882, |
| "grad_norm": 0.24841468175025513, |
| "learning_rate": 2.066577301161752e-05, |
| "loss": 0.3522, |
| "step": 1562 |
| }, |
| { |
| "epoch": 1.8854041013268998, |
| "grad_norm": 0.31768402839745774, |
| "learning_rate": 2.0643431635388742e-05, |
| "loss": 0.3736, |
| "step": 1563 |
| }, |
| { |
| "epoch": 1.8866103739445115, |
| "grad_norm": 0.3191797777615365, |
| "learning_rate": 2.0621090259159963e-05, |
| "loss": 0.3766, |
| "step": 1564 |
| }, |
| { |
| "epoch": 1.887816646562123, |
| "grad_norm": 0.272658079681039, |
| "learning_rate": 2.059874888293119e-05, |
| "loss": 0.3611, |
| "step": 1565 |
| }, |
| { |
| "epoch": 1.8890229191797347, |
| "grad_norm": 0.2622171628236883, |
| "learning_rate": 2.0576407506702415e-05, |
| "loss": 0.3444, |
| "step": 1566 |
| }, |
| { |
| "epoch": 1.8902291917973462, |
| "grad_norm": 0.31135421295399496, |
| "learning_rate": 2.0554066130473635e-05, |
| "loss": 0.3534, |
| "step": 1567 |
| }, |
| { |
| "epoch": 1.8914354644149578, |
| "grad_norm": 0.2777552393153191, |
| "learning_rate": 2.0531724754244863e-05, |
| "loss": 0.3451, |
| "step": 1568 |
| }, |
| { |
| "epoch": 1.8926417370325694, |
| "grad_norm": 0.2808254408248436, |
| "learning_rate": 2.0509383378016087e-05, |
| "loss": 0.3711, |
| "step": 1569 |
| }, |
| { |
| "epoch": 1.8938480096501809, |
| "grad_norm": 0.2629707060553636, |
| "learning_rate": 2.048704200178731e-05, |
| "loss": 0.3536, |
| "step": 1570 |
| }, |
| { |
| "epoch": 1.8950542822677925, |
| "grad_norm": 0.284968406394402, |
| "learning_rate": 2.0464700625558535e-05, |
| "loss": 0.3478, |
| "step": 1571 |
| }, |
| { |
| "epoch": 1.8962605548854041, |
| "grad_norm": 0.277964729242244, |
| "learning_rate": 2.044235924932976e-05, |
| "loss": 0.3522, |
| "step": 1572 |
| }, |
| { |
| "epoch": 1.8974668275030155, |
| "grad_norm": 0.28027713264841336, |
| "learning_rate": 2.0420017873100983e-05, |
| "loss": 0.3589, |
| "step": 1573 |
| }, |
| { |
| "epoch": 1.8986731001206274, |
| "grad_norm": 0.3050543089008853, |
| "learning_rate": 2.0397676496872207e-05, |
| "loss": 0.3737, |
| "step": 1574 |
| }, |
| { |
| "epoch": 1.8998793727382388, |
| "grad_norm": 0.333573721833168, |
| "learning_rate": 2.037533512064343e-05, |
| "loss": 0.3723, |
| "step": 1575 |
| }, |
| { |
| "epoch": 1.9010856453558505, |
| "grad_norm": 0.27558632494530466, |
| "learning_rate": 2.0352993744414656e-05, |
| "loss": 0.347, |
| "step": 1576 |
| }, |
| { |
| "epoch": 1.902291917973462, |
| "grad_norm": 0.2749496186635863, |
| "learning_rate": 2.0330652368185883e-05, |
| "loss": 0.3584, |
| "step": 1577 |
| }, |
| { |
| "epoch": 1.9034981905910735, |
| "grad_norm": 0.27623036751877184, |
| "learning_rate": 2.0308310991957104e-05, |
| "loss": 0.3671, |
| "step": 1578 |
| }, |
| { |
| "epoch": 1.9047044632086851, |
| "grad_norm": 0.35924451163298154, |
| "learning_rate": 2.0285969615728328e-05, |
| "loss": 0.3551, |
| "step": 1579 |
| }, |
| { |
| "epoch": 1.9059107358262968, |
| "grad_norm": 0.2809181345145692, |
| "learning_rate": 2.0263628239499555e-05, |
| "loss": 0.3502, |
| "step": 1580 |
| }, |
| { |
| "epoch": 1.9071170084439082, |
| "grad_norm": 0.25879235314860666, |
| "learning_rate": 2.024128686327078e-05, |
| "loss": 0.3405, |
| "step": 1581 |
| }, |
| { |
| "epoch": 1.90832328106152, |
| "grad_norm": 0.2849966409585352, |
| "learning_rate": 2.0218945487042e-05, |
| "loss": 0.3378, |
| "step": 1582 |
| }, |
| { |
| "epoch": 1.9095295536791315, |
| "grad_norm": 0.3372371097579718, |
| "learning_rate": 2.0196604110813228e-05, |
| "loss": 0.3517, |
| "step": 1583 |
| }, |
| { |
| "epoch": 1.910735826296743, |
| "grad_norm": 0.25632078946719644, |
| "learning_rate": 2.0174262734584452e-05, |
| "loss": 0.3443, |
| "step": 1584 |
| }, |
| { |
| "epoch": 1.9119420989143547, |
| "grad_norm": 0.26956940258980094, |
| "learning_rate": 2.0151921358355676e-05, |
| "loss": 0.3401, |
| "step": 1585 |
| }, |
| { |
| "epoch": 1.9131483715319662, |
| "grad_norm": 0.26515149174118896, |
| "learning_rate": 2.01295799821269e-05, |
| "loss": 0.3523, |
| "step": 1586 |
| }, |
| { |
| "epoch": 1.9143546441495778, |
| "grad_norm": 0.2613492785001853, |
| "learning_rate": 2.0107238605898124e-05, |
| "loss": 0.3739, |
| "step": 1587 |
| }, |
| { |
| "epoch": 1.9155609167671894, |
| "grad_norm": 0.23950143457883746, |
| "learning_rate": 2.008489722966935e-05, |
| "loss": 0.3343, |
| "step": 1588 |
| }, |
| { |
| "epoch": 1.9167671893848008, |
| "grad_norm": 0.30117376449701433, |
| "learning_rate": 2.0062555853440572e-05, |
| "loss": 0.3709, |
| "step": 1589 |
| }, |
| { |
| "epoch": 1.9179734620024127, |
| "grad_norm": 0.2593161624873031, |
| "learning_rate": 2.0040214477211796e-05, |
| "loss": 0.3452, |
| "step": 1590 |
| }, |
| { |
| "epoch": 1.9191797346200241, |
| "grad_norm": 0.2407521061083514, |
| "learning_rate": 2.0017873100983024e-05, |
| "loss": 0.3735, |
| "step": 1591 |
| }, |
| { |
| "epoch": 1.9203860072376358, |
| "grad_norm": 0.26195291267250836, |
| "learning_rate": 1.9995531724754248e-05, |
| "loss": 0.3509, |
| "step": 1592 |
| }, |
| { |
| "epoch": 1.9215922798552474, |
| "grad_norm": 0.297432231962128, |
| "learning_rate": 1.997319034852547e-05, |
| "loss": 0.3442, |
| "step": 1593 |
| }, |
| { |
| "epoch": 1.9227985524728588, |
| "grad_norm": 0.2399537948274586, |
| "learning_rate": 1.9950848972296696e-05, |
| "loss": 0.3471, |
| "step": 1594 |
| }, |
| { |
| "epoch": 1.9240048250904704, |
| "grad_norm": 0.28295475054379304, |
| "learning_rate": 1.992850759606792e-05, |
| "loss": 0.3615, |
| "step": 1595 |
| }, |
| { |
| "epoch": 1.925211097708082, |
| "grad_norm": 0.2894150589928256, |
| "learning_rate": 1.990616621983914e-05, |
| "loss": 0.3744, |
| "step": 1596 |
| }, |
| { |
| "epoch": 1.9264173703256935, |
| "grad_norm": 0.27749447033260927, |
| "learning_rate": 1.9883824843610368e-05, |
| "loss": 0.363, |
| "step": 1597 |
| }, |
| { |
| "epoch": 1.9276236429433053, |
| "grad_norm": 0.28672944617778745, |
| "learning_rate": 1.9861483467381592e-05, |
| "loss": 0.3747, |
| "step": 1598 |
| }, |
| { |
| "epoch": 1.9288299155609168, |
| "grad_norm": 0.2847048913976895, |
| "learning_rate": 1.9839142091152816e-05, |
| "loss": 0.3405, |
| "step": 1599 |
| }, |
| { |
| "epoch": 1.9300361881785284, |
| "grad_norm": 0.28224450714587446, |
| "learning_rate": 1.981680071492404e-05, |
| "loss": 0.369, |
| "step": 1600 |
| }, |
| { |
| "epoch": 1.93124246079614, |
| "grad_norm": 0.251007009158062, |
| "learning_rate": 1.9794459338695265e-05, |
| "loss": 0.3583, |
| "step": 1601 |
| }, |
| { |
| "epoch": 1.9324487334137515, |
| "grad_norm": 0.2700503054912305, |
| "learning_rate": 1.977211796246649e-05, |
| "loss": 0.3487, |
| "step": 1602 |
| }, |
| { |
| "epoch": 1.933655006031363, |
| "grad_norm": 0.26601499415202334, |
| "learning_rate": 1.9749776586237713e-05, |
| "loss": 0.356, |
| "step": 1603 |
| }, |
| { |
| "epoch": 1.9348612786489747, |
| "grad_norm": 0.26517746425234484, |
| "learning_rate": 1.9727435210008937e-05, |
| "loss": 0.3751, |
| "step": 1604 |
| }, |
| { |
| "epoch": 1.9360675512665861, |
| "grad_norm": 0.3075644038798218, |
| "learning_rate": 1.970509383378016e-05, |
| "loss": 0.3865, |
| "step": 1605 |
| }, |
| { |
| "epoch": 1.9372738238841978, |
| "grad_norm": 0.2748549582323955, |
| "learning_rate": 1.968275245755139e-05, |
| "loss": 0.3537, |
| "step": 1606 |
| }, |
| { |
| "epoch": 1.9384800965018094, |
| "grad_norm": 0.2595112570755194, |
| "learning_rate": 1.966041108132261e-05, |
| "loss": 0.3576, |
| "step": 1607 |
| }, |
| { |
| "epoch": 1.9396863691194208, |
| "grad_norm": 0.2512201382268573, |
| "learning_rate": 1.9638069705093833e-05, |
| "loss": 0.3417, |
| "step": 1608 |
| }, |
| { |
| "epoch": 1.9408926417370327, |
| "grad_norm": 0.3057741641131344, |
| "learning_rate": 1.961572832886506e-05, |
| "loss": 0.3531, |
| "step": 1609 |
| }, |
| { |
| "epoch": 1.942098914354644, |
| "grad_norm": 0.2995428068053764, |
| "learning_rate": 1.9593386952636285e-05, |
| "loss": 0.3577, |
| "step": 1610 |
| }, |
| { |
| "epoch": 1.9433051869722557, |
| "grad_norm": 0.29519013006895595, |
| "learning_rate": 1.9571045576407505e-05, |
| "loss": 0.3645, |
| "step": 1611 |
| }, |
| { |
| "epoch": 1.9445114595898674, |
| "grad_norm": 0.31079328371619636, |
| "learning_rate": 1.9548704200178733e-05, |
| "loss": 0.3765, |
| "step": 1612 |
| }, |
| { |
| "epoch": 1.9457177322074788, |
| "grad_norm": 0.31234042894752406, |
| "learning_rate": 1.9526362823949957e-05, |
| "loss": 0.3538, |
| "step": 1613 |
| }, |
| { |
| "epoch": 1.9469240048250904, |
| "grad_norm": 0.28659000754418135, |
| "learning_rate": 1.950402144772118e-05, |
| "loss": 0.3731, |
| "step": 1614 |
| }, |
| { |
| "epoch": 1.948130277442702, |
| "grad_norm": 0.264814616626347, |
| "learning_rate": 1.9481680071492405e-05, |
| "loss": 0.3659, |
| "step": 1615 |
| }, |
| { |
| "epoch": 1.9493365500603135, |
| "grad_norm": 0.24366644264153936, |
| "learning_rate": 1.945933869526363e-05, |
| "loss": 0.3673, |
| "step": 1616 |
| }, |
| { |
| "epoch": 1.9505428226779253, |
| "grad_norm": 0.272103653808616, |
| "learning_rate": 1.9436997319034853e-05, |
| "loss": 0.3507, |
| "step": 1617 |
| }, |
| { |
| "epoch": 1.9517490952955368, |
| "grad_norm": 0.30326138219958865, |
| "learning_rate": 1.9414655942806077e-05, |
| "loss": 0.3673, |
| "step": 1618 |
| }, |
| { |
| "epoch": 1.9529553679131484, |
| "grad_norm": 0.2471852651757173, |
| "learning_rate": 1.93923145665773e-05, |
| "loss": 0.3429, |
| "step": 1619 |
| }, |
| { |
| "epoch": 1.95416164053076, |
| "grad_norm": 0.29823029929658096, |
| "learning_rate": 1.9369973190348526e-05, |
| "loss": 0.3498, |
| "step": 1620 |
| }, |
| { |
| "epoch": 1.9553679131483714, |
| "grad_norm": 0.25618786812647487, |
| "learning_rate": 1.9347631814119753e-05, |
| "loss": 0.3602, |
| "step": 1621 |
| }, |
| { |
| "epoch": 1.956574185765983, |
| "grad_norm": 0.2626954040000901, |
| "learning_rate": 1.9325290437890974e-05, |
| "loss": 0.3548, |
| "step": 1622 |
| }, |
| { |
| "epoch": 1.9577804583835947, |
| "grad_norm": 0.30278639551876474, |
| "learning_rate": 1.9302949061662198e-05, |
| "loss": 0.3602, |
| "step": 1623 |
| }, |
| { |
| "epoch": 1.9589867310012061, |
| "grad_norm": 0.32422885066788054, |
| "learning_rate": 1.9280607685433425e-05, |
| "loss": 0.3536, |
| "step": 1624 |
| }, |
| { |
| "epoch": 1.960193003618818, |
| "grad_norm": 0.25780893788890336, |
| "learning_rate": 1.9258266309204646e-05, |
| "loss": 0.3448, |
| "step": 1625 |
| }, |
| { |
| "epoch": 1.9613992762364294, |
| "grad_norm": 0.2701194515692144, |
| "learning_rate": 1.923592493297587e-05, |
| "loss": 0.3685, |
| "step": 1626 |
| }, |
| { |
| "epoch": 1.962605548854041, |
| "grad_norm": 0.2637933151314994, |
| "learning_rate": 1.9213583556747098e-05, |
| "loss": 0.3515, |
| "step": 1627 |
| }, |
| { |
| "epoch": 1.9638118214716527, |
| "grad_norm": 0.2934963853487863, |
| "learning_rate": 1.9191242180518322e-05, |
| "loss": 0.3742, |
| "step": 1628 |
| }, |
| { |
| "epoch": 1.965018094089264, |
| "grad_norm": 0.22910011332182156, |
| "learning_rate": 1.9168900804289542e-05, |
| "loss": 0.3647, |
| "step": 1629 |
| }, |
| { |
| "epoch": 1.9662243667068757, |
| "grad_norm": 0.3393223178431687, |
| "learning_rate": 1.914655942806077e-05, |
| "loss": 0.3479, |
| "step": 1630 |
| }, |
| { |
| "epoch": 1.9674306393244874, |
| "grad_norm": 0.27703033721697085, |
| "learning_rate": 1.9124218051831994e-05, |
| "loss": 0.3809, |
| "step": 1631 |
| }, |
| { |
| "epoch": 1.9686369119420988, |
| "grad_norm": 0.3208522052589231, |
| "learning_rate": 1.9101876675603218e-05, |
| "loss": 0.3681, |
| "step": 1632 |
| }, |
| { |
| "epoch": 1.9698431845597106, |
| "grad_norm": 0.27699992821466946, |
| "learning_rate": 1.9079535299374442e-05, |
| "loss": 0.3516, |
| "step": 1633 |
| }, |
| { |
| "epoch": 1.971049457177322, |
| "grad_norm": 0.2712403728322839, |
| "learning_rate": 1.9057193923145666e-05, |
| "loss": 0.3423, |
| "step": 1634 |
| }, |
| { |
| "epoch": 1.9722557297949337, |
| "grad_norm": 0.2855332321421069, |
| "learning_rate": 1.903485254691689e-05, |
| "loss": 0.3318, |
| "step": 1635 |
| }, |
| { |
| "epoch": 1.9734620024125453, |
| "grad_norm": 0.2815389249603608, |
| "learning_rate": 1.9012511170688114e-05, |
| "loss": 0.3459, |
| "step": 1636 |
| }, |
| { |
| "epoch": 1.9746682750301567, |
| "grad_norm": 0.28182396775800955, |
| "learning_rate": 1.899016979445934e-05, |
| "loss": 0.3384, |
| "step": 1637 |
| }, |
| { |
| "epoch": 1.9758745476477684, |
| "grad_norm": 0.2718840146128206, |
| "learning_rate": 1.8967828418230563e-05, |
| "loss": 0.3609, |
| "step": 1638 |
| }, |
| { |
| "epoch": 1.97708082026538, |
| "grad_norm": 0.2591947787566494, |
| "learning_rate": 1.894548704200179e-05, |
| "loss": 0.3428, |
| "step": 1639 |
| }, |
| { |
| "epoch": 1.9782870928829914, |
| "grad_norm": 0.32383105243939264, |
| "learning_rate": 1.892314566577301e-05, |
| "loss": 0.3647, |
| "step": 1640 |
| }, |
| { |
| "epoch": 1.9794933655006033, |
| "grad_norm": 0.2563990858898975, |
| "learning_rate": 1.8900804289544235e-05, |
| "loss": 0.3484, |
| "step": 1641 |
| }, |
| { |
| "epoch": 1.9806996381182147, |
| "grad_norm": 0.3105355894461767, |
| "learning_rate": 1.8878462913315462e-05, |
| "loss": 0.3959, |
| "step": 1642 |
| }, |
| { |
| "epoch": 1.9819059107358263, |
| "grad_norm": 0.301088060007025, |
| "learning_rate": 1.8856121537086686e-05, |
| "loss": 0.3333, |
| "step": 1643 |
| }, |
| { |
| "epoch": 1.983112183353438, |
| "grad_norm": 0.28554630545995097, |
| "learning_rate": 1.8833780160857907e-05, |
| "loss": 0.3487, |
| "step": 1644 |
| }, |
| { |
| "epoch": 1.9843184559710494, |
| "grad_norm": 0.2602079469425586, |
| "learning_rate": 1.8811438784629135e-05, |
| "loss": 0.349, |
| "step": 1645 |
| }, |
| { |
| "epoch": 1.985524728588661, |
| "grad_norm": 0.2923498070190884, |
| "learning_rate": 1.878909740840036e-05, |
| "loss": 0.374, |
| "step": 1646 |
| }, |
| { |
| "epoch": 1.9867310012062727, |
| "grad_norm": 0.3420070267678662, |
| "learning_rate": 1.8766756032171583e-05, |
| "loss": 0.363, |
| "step": 1647 |
| }, |
| { |
| "epoch": 1.987937273823884, |
| "grad_norm": 0.2706072187767052, |
| "learning_rate": 1.8744414655942807e-05, |
| "loss": 0.3674, |
| "step": 1648 |
| }, |
| { |
| "epoch": 1.989143546441496, |
| "grad_norm": 0.33498382557114115, |
| "learning_rate": 1.872207327971403e-05, |
| "loss": 0.3637, |
| "step": 1649 |
| }, |
| { |
| "epoch": 1.9903498190591074, |
| "grad_norm": 0.3450578881904668, |
| "learning_rate": 1.869973190348526e-05, |
| "loss": 0.3471, |
| "step": 1650 |
| }, |
| { |
| "epoch": 1.991556091676719, |
| "grad_norm": 0.28093156857550655, |
| "learning_rate": 1.867739052725648e-05, |
| "loss": 0.373, |
| "step": 1651 |
| }, |
| { |
| "epoch": 1.9927623642943306, |
| "grad_norm": 0.33731372039057506, |
| "learning_rate": 1.8655049151027703e-05, |
| "loss": 0.352, |
| "step": 1652 |
| }, |
| { |
| "epoch": 1.993968636911942, |
| "grad_norm": 0.32637042812980754, |
| "learning_rate": 1.863270777479893e-05, |
| "loss": 0.3553, |
| "step": 1653 |
| }, |
| { |
| "epoch": 1.9951749095295537, |
| "grad_norm": 0.2924533816211366, |
| "learning_rate": 1.8610366398570155e-05, |
| "loss": 0.3422, |
| "step": 1654 |
| }, |
| { |
| "epoch": 1.9963811821471653, |
| "grad_norm": 0.30816680687954695, |
| "learning_rate": 1.8588025022341376e-05, |
| "loss": 0.3514, |
| "step": 1655 |
| }, |
| { |
| "epoch": 1.9975874547647767, |
| "grad_norm": 0.37414553666596123, |
| "learning_rate": 1.8565683646112603e-05, |
| "loss": 0.3375, |
| "step": 1656 |
| }, |
| { |
| "epoch": 1.9987937273823886, |
| "grad_norm": 0.25177696486470447, |
| "learning_rate": 1.8543342269883827e-05, |
| "loss": 0.3447, |
| "step": 1657 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.2985722047195102, |
| "learning_rate": 1.8521000893655048e-05, |
| "loss": 0.3362, |
| "step": 1658 |
| }, |
| { |
| "epoch": 2.0012062726176114, |
| "grad_norm": 0.3582654343499216, |
| "learning_rate": 1.8498659517426275e-05, |
| "loss": 0.2966, |
| "step": 1659 |
| }, |
| { |
| "epoch": 2.0024125452352233, |
| "grad_norm": 0.2510003107801239, |
| "learning_rate": 1.84763181411975e-05, |
| "loss": 0.3027, |
| "step": 1660 |
| }, |
| { |
| "epoch": 2.0036188178528347, |
| "grad_norm": 0.31848482820403823, |
| "learning_rate": 1.8453976764968723e-05, |
| "loss": 0.3072, |
| "step": 1661 |
| }, |
| { |
| "epoch": 2.004825090470446, |
| "grad_norm": 0.28121589676814773, |
| "learning_rate": 1.8431635388739948e-05, |
| "loss": 0.2799, |
| "step": 1662 |
| }, |
| { |
| "epoch": 2.006031363088058, |
| "grad_norm": 0.2790990286631224, |
| "learning_rate": 1.840929401251117e-05, |
| "loss": 0.295, |
| "step": 1663 |
| }, |
| { |
| "epoch": 2.0072376357056694, |
| "grad_norm": 0.29716421480885014, |
| "learning_rate": 1.8386952636282396e-05, |
| "loss": 0.2905, |
| "step": 1664 |
| }, |
| { |
| "epoch": 2.0084439083232812, |
| "grad_norm": 0.2876586964710323, |
| "learning_rate": 1.836461126005362e-05, |
| "loss": 0.2738, |
| "step": 1665 |
| }, |
| { |
| "epoch": 2.0096501809408926, |
| "grad_norm": 0.2847467005141539, |
| "learning_rate": 1.8342269883824844e-05, |
| "loss": 0.2744, |
| "step": 1666 |
| }, |
| { |
| "epoch": 2.010856453558504, |
| "grad_norm": 0.2848323602535861, |
| "learning_rate": 1.8319928507596068e-05, |
| "loss": 0.2937, |
| "step": 1667 |
| }, |
| { |
| "epoch": 2.012062726176116, |
| "grad_norm": 0.30619210279161174, |
| "learning_rate": 1.8297587131367295e-05, |
| "loss": 0.2945, |
| "step": 1668 |
| }, |
| { |
| "epoch": 2.0132689987937273, |
| "grad_norm": 0.29414910099799263, |
| "learning_rate": 1.8275245755138516e-05, |
| "loss": 0.2946, |
| "step": 1669 |
| }, |
| { |
| "epoch": 2.0144752714113388, |
| "grad_norm": 0.289304762173054, |
| "learning_rate": 1.825290437890974e-05, |
| "loss": 0.284, |
| "step": 1670 |
| }, |
| { |
| "epoch": 2.0156815440289506, |
| "grad_norm": 0.28576033145670227, |
| "learning_rate": 1.8230563002680968e-05, |
| "loss": 0.2867, |
| "step": 1671 |
| }, |
| { |
| "epoch": 2.016887816646562, |
| "grad_norm": 0.32373169860293655, |
| "learning_rate": 1.8208221626452192e-05, |
| "loss": 0.2995, |
| "step": 1672 |
| }, |
| { |
| "epoch": 2.018094089264174, |
| "grad_norm": 0.23638681779954598, |
| "learning_rate": 1.8185880250223413e-05, |
| "loss": 0.2878, |
| "step": 1673 |
| }, |
| { |
| "epoch": 2.0193003618817853, |
| "grad_norm": 0.28453090920113555, |
| "learning_rate": 1.816353887399464e-05, |
| "loss": 0.2945, |
| "step": 1674 |
| }, |
| { |
| "epoch": 2.0205066344993967, |
| "grad_norm": 0.2549255381981737, |
| "learning_rate": 1.8141197497765864e-05, |
| "loss": 0.2966, |
| "step": 1675 |
| }, |
| { |
| "epoch": 2.0217129071170086, |
| "grad_norm": 0.29462585796542506, |
| "learning_rate": 1.8118856121537088e-05, |
| "loss": 0.2864, |
| "step": 1676 |
| }, |
| { |
| "epoch": 2.02291917973462, |
| "grad_norm": 0.26548129330247616, |
| "learning_rate": 1.8096514745308312e-05, |
| "loss": 0.3023, |
| "step": 1677 |
| }, |
| { |
| "epoch": 2.0241254523522314, |
| "grad_norm": 0.31332310597104845, |
| "learning_rate": 1.8074173369079536e-05, |
| "loss": 0.2946, |
| "step": 1678 |
| }, |
| { |
| "epoch": 2.0253317249698433, |
| "grad_norm": 0.27338445045211085, |
| "learning_rate": 1.805183199285076e-05, |
| "loss": 0.2804, |
| "step": 1679 |
| }, |
| { |
| "epoch": 2.0265379975874547, |
| "grad_norm": 0.3118425725530232, |
| "learning_rate": 1.8029490616621985e-05, |
| "loss": 0.3007, |
| "step": 1680 |
| }, |
| { |
| "epoch": 2.0277442702050665, |
| "grad_norm": 0.27714195106597606, |
| "learning_rate": 1.800714924039321e-05, |
| "loss": 0.2884, |
| "step": 1681 |
| }, |
| { |
| "epoch": 2.028950542822678, |
| "grad_norm": 0.31958347812909327, |
| "learning_rate": 1.7984807864164433e-05, |
| "loss": 0.2999, |
| "step": 1682 |
| }, |
| { |
| "epoch": 2.0301568154402894, |
| "grad_norm": 0.29186659149763977, |
| "learning_rate": 1.796246648793566e-05, |
| "loss": 0.2916, |
| "step": 1683 |
| }, |
| { |
| "epoch": 2.0313630880579012, |
| "grad_norm": 0.2733656352448462, |
| "learning_rate": 1.794012511170688e-05, |
| "loss": 0.2765, |
| "step": 1684 |
| }, |
| { |
| "epoch": 2.0325693606755126, |
| "grad_norm": 0.28574192979858715, |
| "learning_rate": 1.7917783735478105e-05, |
| "loss": 0.311, |
| "step": 1685 |
| }, |
| { |
| "epoch": 2.033775633293124, |
| "grad_norm": 0.2991351897521293, |
| "learning_rate": 1.7895442359249332e-05, |
| "loss": 0.2852, |
| "step": 1686 |
| }, |
| { |
| "epoch": 2.034981905910736, |
| "grad_norm": 0.24236890788237953, |
| "learning_rate": 1.7873100983020553e-05, |
| "loss": 0.2931, |
| "step": 1687 |
| }, |
| { |
| "epoch": 2.0361881785283473, |
| "grad_norm": 0.2551685310046532, |
| "learning_rate": 1.7850759606791777e-05, |
| "loss": 0.2908, |
| "step": 1688 |
| }, |
| { |
| "epoch": 2.037394451145959, |
| "grad_norm": 0.2549058353261533, |
| "learning_rate": 1.7828418230563005e-05, |
| "loss": 0.2823, |
| "step": 1689 |
| }, |
| { |
| "epoch": 2.0386007237635706, |
| "grad_norm": 0.24168430653360778, |
| "learning_rate": 1.780607685433423e-05, |
| "loss": 0.2804, |
| "step": 1690 |
| }, |
| { |
| "epoch": 2.039806996381182, |
| "grad_norm": 0.24712540293178345, |
| "learning_rate": 1.778373547810545e-05, |
| "loss": 0.2889, |
| "step": 1691 |
| }, |
| { |
| "epoch": 2.041013268998794, |
| "grad_norm": 0.2584950258915421, |
| "learning_rate": 1.7761394101876677e-05, |
| "loss": 0.2979, |
| "step": 1692 |
| }, |
| { |
| "epoch": 2.0422195416164053, |
| "grad_norm": 0.2547855415817537, |
| "learning_rate": 1.77390527256479e-05, |
| "loss": 0.2936, |
| "step": 1693 |
| }, |
| { |
| "epoch": 2.0434258142340167, |
| "grad_norm": 0.24820792443315048, |
| "learning_rate": 1.7716711349419125e-05, |
| "loss": 0.2929, |
| "step": 1694 |
| }, |
| { |
| "epoch": 2.0446320868516286, |
| "grad_norm": 0.2600712986700368, |
| "learning_rate": 1.769436997319035e-05, |
| "loss": 0.2912, |
| "step": 1695 |
| }, |
| { |
| "epoch": 2.04583835946924, |
| "grad_norm": 0.2369033927502002, |
| "learning_rate": 1.7672028596961573e-05, |
| "loss": 0.2935, |
| "step": 1696 |
| }, |
| { |
| "epoch": 2.047044632086852, |
| "grad_norm": 0.24572060560713774, |
| "learning_rate": 1.7649687220732797e-05, |
| "loss": 0.2728, |
| "step": 1697 |
| }, |
| { |
| "epoch": 2.0482509047044632, |
| "grad_norm": 0.31718716534120606, |
| "learning_rate": 1.762734584450402e-05, |
| "loss": 0.2952, |
| "step": 1698 |
| }, |
| { |
| "epoch": 2.0494571773220747, |
| "grad_norm": 0.23868855770487613, |
| "learning_rate": 1.7605004468275246e-05, |
| "loss": 0.2796, |
| "step": 1699 |
| }, |
| { |
| "epoch": 2.0506634499396865, |
| "grad_norm": 0.2810826971186264, |
| "learning_rate": 1.758266309204647e-05, |
| "loss": 0.2919, |
| "step": 1700 |
| }, |
| { |
| "epoch": 2.051869722557298, |
| "grad_norm": 0.27057773712748967, |
| "learning_rate": 1.7560321715817697e-05, |
| "loss": 0.2868, |
| "step": 1701 |
| }, |
| { |
| "epoch": 2.0530759951749094, |
| "grad_norm": 0.23055869199867401, |
| "learning_rate": 1.7537980339588918e-05, |
| "loss": 0.2811, |
| "step": 1702 |
| }, |
| { |
| "epoch": 2.054282267792521, |
| "grad_norm": 0.24322673852618046, |
| "learning_rate": 1.7515638963360142e-05, |
| "loss": 0.2859, |
| "step": 1703 |
| }, |
| { |
| "epoch": 2.0554885404101326, |
| "grad_norm": 0.2750690299702216, |
| "learning_rate": 1.749329758713137e-05, |
| "loss": 0.2966, |
| "step": 1704 |
| }, |
| { |
| "epoch": 2.0566948130277445, |
| "grad_norm": 0.6931475541633155, |
| "learning_rate": 1.7470956210902594e-05, |
| "loss": 0.3079, |
| "step": 1705 |
| }, |
| { |
| "epoch": 2.057901085645356, |
| "grad_norm": 0.21463239940499598, |
| "learning_rate": 1.7448614834673814e-05, |
| "loss": 0.283, |
| "step": 1706 |
| }, |
| { |
| "epoch": 2.0591073582629673, |
| "grad_norm": 0.2902830958637698, |
| "learning_rate": 1.742627345844504e-05, |
| "loss": 0.2766, |
| "step": 1707 |
| }, |
| { |
| "epoch": 2.060313630880579, |
| "grad_norm": 0.2605225165985984, |
| "learning_rate": 1.7403932082216266e-05, |
| "loss": 0.3049, |
| "step": 1708 |
| }, |
| { |
| "epoch": 2.0615199034981906, |
| "grad_norm": 0.24065535000122262, |
| "learning_rate": 1.738159070598749e-05, |
| "loss": 0.2764, |
| "step": 1709 |
| }, |
| { |
| "epoch": 2.062726176115802, |
| "grad_norm": 0.268617788452376, |
| "learning_rate": 1.7359249329758714e-05, |
| "loss": 0.3052, |
| "step": 1710 |
| }, |
| { |
| "epoch": 2.063932448733414, |
| "grad_norm": 0.24609202820968598, |
| "learning_rate": 1.7336907953529938e-05, |
| "loss": 0.3027, |
| "step": 1711 |
| }, |
| { |
| "epoch": 2.0651387213510253, |
| "grad_norm": 0.24966047333966127, |
| "learning_rate": 1.7314566577301166e-05, |
| "loss": 0.2841, |
| "step": 1712 |
| }, |
| { |
| "epoch": 2.066344993968637, |
| "grad_norm": 0.23861829254548647, |
| "learning_rate": 1.7292225201072386e-05, |
| "loss": 0.2844, |
| "step": 1713 |
| }, |
| { |
| "epoch": 2.0675512665862485, |
| "grad_norm": 0.21580214631270747, |
| "learning_rate": 1.726988382484361e-05, |
| "loss": 0.2854, |
| "step": 1714 |
| }, |
| { |
| "epoch": 2.06875753920386, |
| "grad_norm": 0.23851902544110398, |
| "learning_rate": 1.7247542448614838e-05, |
| "loss": 0.2991, |
| "step": 1715 |
| }, |
| { |
| "epoch": 2.069963811821472, |
| "grad_norm": 0.26704566565966975, |
| "learning_rate": 1.7225201072386062e-05, |
| "loss": 0.3057, |
| "step": 1716 |
| }, |
| { |
| "epoch": 2.0711700844390832, |
| "grad_norm": 0.2921229741945177, |
| "learning_rate": 1.7202859696157283e-05, |
| "loss": 0.2876, |
| "step": 1717 |
| }, |
| { |
| "epoch": 2.0723763570566947, |
| "grad_norm": 0.2704074358886806, |
| "learning_rate": 1.718051831992851e-05, |
| "loss": 0.2915, |
| "step": 1718 |
| }, |
| { |
| "epoch": 2.0735826296743065, |
| "grad_norm": 0.26102313901994467, |
| "learning_rate": 1.7158176943699734e-05, |
| "loss": 0.2728, |
| "step": 1719 |
| }, |
| { |
| "epoch": 2.074788902291918, |
| "grad_norm": 0.31354244476135346, |
| "learning_rate": 1.7135835567470955e-05, |
| "loss": 0.2925, |
| "step": 1720 |
| }, |
| { |
| "epoch": 2.0759951749095293, |
| "grad_norm": 0.3101725582423187, |
| "learning_rate": 1.7113494191242182e-05, |
| "loss": 0.2978, |
| "step": 1721 |
| }, |
| { |
| "epoch": 2.077201447527141, |
| "grad_norm": 0.2503609628190097, |
| "learning_rate": 1.7091152815013406e-05, |
| "loss": 0.2994, |
| "step": 1722 |
| }, |
| { |
| "epoch": 2.0784077201447526, |
| "grad_norm": 0.3010569194193657, |
| "learning_rate": 1.706881143878463e-05, |
| "loss": 0.2724, |
| "step": 1723 |
| }, |
| { |
| "epoch": 2.0796139927623645, |
| "grad_norm": 0.3554100305822572, |
| "learning_rate": 1.7046470062555855e-05, |
| "loss": 0.286, |
| "step": 1724 |
| }, |
| { |
| "epoch": 2.080820265379976, |
| "grad_norm": 0.28020867058791404, |
| "learning_rate": 1.702412868632708e-05, |
| "loss": 0.2924, |
| "step": 1725 |
| }, |
| { |
| "epoch": 2.0820265379975873, |
| "grad_norm": 0.27399523525779423, |
| "learning_rate": 1.7001787310098303e-05, |
| "loss": 0.2816, |
| "step": 1726 |
| }, |
| { |
| "epoch": 2.083232810615199, |
| "grad_norm": 0.30202972438037023, |
| "learning_rate": 1.6979445933869527e-05, |
| "loss": 0.2883, |
| "step": 1727 |
| }, |
| { |
| "epoch": 2.0844390832328106, |
| "grad_norm": 0.25985583955795966, |
| "learning_rate": 1.695710455764075e-05, |
| "loss": 0.2931, |
| "step": 1728 |
| }, |
| { |
| "epoch": 2.085645355850422, |
| "grad_norm": 0.23082414332906082, |
| "learning_rate": 1.6934763181411975e-05, |
| "loss": 0.2891, |
| "step": 1729 |
| }, |
| { |
| "epoch": 2.086851628468034, |
| "grad_norm": 0.2637973752139721, |
| "learning_rate": 1.6912421805183202e-05, |
| "loss": 0.291, |
| "step": 1730 |
| }, |
| { |
| "epoch": 2.0880579010856453, |
| "grad_norm": 0.31611307943527334, |
| "learning_rate": 1.6890080428954423e-05, |
| "loss": 0.2871, |
| "step": 1731 |
| }, |
| { |
| "epoch": 2.089264173703257, |
| "grad_norm": 0.27342534759657844, |
| "learning_rate": 1.6867739052725647e-05, |
| "loss": 0.2922, |
| "step": 1732 |
| }, |
| { |
| "epoch": 2.0904704463208685, |
| "grad_norm": 0.23859203460193187, |
| "learning_rate": 1.6845397676496875e-05, |
| "loss": 0.2916, |
| "step": 1733 |
| }, |
| { |
| "epoch": 2.09167671893848, |
| "grad_norm": 0.2894846792042188, |
| "learning_rate": 1.68230563002681e-05, |
| "loss": 0.2882, |
| "step": 1734 |
| }, |
| { |
| "epoch": 2.092882991556092, |
| "grad_norm": 0.2880969256927605, |
| "learning_rate": 1.680071492403932e-05, |
| "loss": 0.3016, |
| "step": 1735 |
| }, |
| { |
| "epoch": 2.0940892641737032, |
| "grad_norm": 0.24513385363687332, |
| "learning_rate": 1.6778373547810547e-05, |
| "loss": 0.2812, |
| "step": 1736 |
| }, |
| { |
| "epoch": 2.0952955367913146, |
| "grad_norm": 0.254098444251658, |
| "learning_rate": 1.675603217158177e-05, |
| "loss": 0.2919, |
| "step": 1737 |
| }, |
| { |
| "epoch": 2.0965018094089265, |
| "grad_norm": 0.2633220043801049, |
| "learning_rate": 1.6733690795352995e-05, |
| "loss": 0.2848, |
| "step": 1738 |
| }, |
| { |
| "epoch": 2.097708082026538, |
| "grad_norm": 0.2940832498604142, |
| "learning_rate": 1.671134941912422e-05, |
| "loss": 0.292, |
| "step": 1739 |
| }, |
| { |
| "epoch": 2.0989143546441498, |
| "grad_norm": 0.24954777585396712, |
| "learning_rate": 1.6689008042895443e-05, |
| "loss": 0.282, |
| "step": 1740 |
| }, |
| { |
| "epoch": 2.100120627261761, |
| "grad_norm": 0.248278540621884, |
| "learning_rate": 1.6666666666666667e-05, |
| "loss": 0.2894, |
| "step": 1741 |
| }, |
| { |
| "epoch": 2.1013268998793726, |
| "grad_norm": 0.2811884680572496, |
| "learning_rate": 1.664432529043789e-05, |
| "loss": 0.2865, |
| "step": 1742 |
| }, |
| { |
| "epoch": 2.1025331724969845, |
| "grad_norm": 0.2522636866338131, |
| "learning_rate": 1.6621983914209116e-05, |
| "loss": 0.2933, |
| "step": 1743 |
| }, |
| { |
| "epoch": 2.103739445114596, |
| "grad_norm": 0.2770783365680852, |
| "learning_rate": 1.659964253798034e-05, |
| "loss": 0.2738, |
| "step": 1744 |
| }, |
| { |
| "epoch": 2.1049457177322073, |
| "grad_norm": 0.25751293080579474, |
| "learning_rate": 1.6577301161751567e-05, |
| "loss": 0.2831, |
| "step": 1745 |
| }, |
| { |
| "epoch": 2.106151990349819, |
| "grad_norm": 0.22569893040934857, |
| "learning_rate": 1.6554959785522788e-05, |
| "loss": 0.2807, |
| "step": 1746 |
| }, |
| { |
| "epoch": 2.1073582629674306, |
| "grad_norm": 0.2676508071107855, |
| "learning_rate": 1.6532618409294012e-05, |
| "loss": 0.2827, |
| "step": 1747 |
| }, |
| { |
| "epoch": 2.1085645355850424, |
| "grad_norm": 0.22742787799565736, |
| "learning_rate": 1.651027703306524e-05, |
| "loss": 0.2729, |
| "step": 1748 |
| }, |
| { |
| "epoch": 2.109770808202654, |
| "grad_norm": 0.24790912426013842, |
| "learning_rate": 1.648793565683646e-05, |
| "loss": 0.2699, |
| "step": 1749 |
| }, |
| { |
| "epoch": 2.1109770808202653, |
| "grad_norm": 0.2916106304957534, |
| "learning_rate": 1.6465594280607684e-05, |
| "loss": 0.2862, |
| "step": 1750 |
| }, |
| { |
| "epoch": 2.112183353437877, |
| "grad_norm": 0.2401141131980366, |
| "learning_rate": 1.6443252904378912e-05, |
| "loss": 0.2824, |
| "step": 1751 |
| }, |
| { |
| "epoch": 2.1133896260554885, |
| "grad_norm": 0.26241788076180006, |
| "learning_rate": 1.6420911528150136e-05, |
| "loss": 0.2767, |
| "step": 1752 |
| }, |
| { |
| "epoch": 2.1145958986731, |
| "grad_norm": 0.2954786816300289, |
| "learning_rate": 1.6398570151921357e-05, |
| "loss": 0.2959, |
| "step": 1753 |
| }, |
| { |
| "epoch": 2.115802171290712, |
| "grad_norm": 0.22908967311620249, |
| "learning_rate": 1.6376228775692584e-05, |
| "loss": 0.2758, |
| "step": 1754 |
| }, |
| { |
| "epoch": 2.117008443908323, |
| "grad_norm": 0.2413217007927014, |
| "learning_rate": 1.6353887399463808e-05, |
| "loss": 0.2757, |
| "step": 1755 |
| }, |
| { |
| "epoch": 2.118214716525935, |
| "grad_norm": 0.2773658127485155, |
| "learning_rate": 1.6331546023235032e-05, |
| "loss": 0.2948, |
| "step": 1756 |
| }, |
| { |
| "epoch": 2.1194209891435465, |
| "grad_norm": 0.22417241060997734, |
| "learning_rate": 1.6309204647006256e-05, |
| "loss": 0.2968, |
| "step": 1757 |
| }, |
| { |
| "epoch": 2.120627261761158, |
| "grad_norm": 0.2553527879768229, |
| "learning_rate": 1.628686327077748e-05, |
| "loss": 0.2884, |
| "step": 1758 |
| }, |
| { |
| "epoch": 2.1218335343787698, |
| "grad_norm": 0.25633754052987345, |
| "learning_rate": 1.6264521894548704e-05, |
| "loss": 0.2814, |
| "step": 1759 |
| }, |
| { |
| "epoch": 2.123039806996381, |
| "grad_norm": 0.2425672162311776, |
| "learning_rate": 1.624218051831993e-05, |
| "loss": 0.3019, |
| "step": 1760 |
| }, |
| { |
| "epoch": 2.1242460796139926, |
| "grad_norm": 0.22861022137422327, |
| "learning_rate": 1.6219839142091153e-05, |
| "loss": 0.2925, |
| "step": 1761 |
| }, |
| { |
| "epoch": 2.1254523522316044, |
| "grad_norm": 0.2840288790695893, |
| "learning_rate": 1.6197497765862377e-05, |
| "loss": 0.3063, |
| "step": 1762 |
| }, |
| { |
| "epoch": 2.126658624849216, |
| "grad_norm": 0.2537508729451685, |
| "learning_rate": 1.6175156389633604e-05, |
| "loss": 0.2933, |
| "step": 1763 |
| }, |
| { |
| "epoch": 2.1278648974668277, |
| "grad_norm": 0.24195914928488352, |
| "learning_rate": 1.6152815013404825e-05, |
| "loss": 0.2993, |
| "step": 1764 |
| }, |
| { |
| "epoch": 2.129071170084439, |
| "grad_norm": 0.27873420100035506, |
| "learning_rate": 1.613047363717605e-05, |
| "loss": 0.3141, |
| "step": 1765 |
| }, |
| { |
| "epoch": 2.1302774427020505, |
| "grad_norm": 0.268235827640432, |
| "learning_rate": 1.6108132260947276e-05, |
| "loss": 0.2931, |
| "step": 1766 |
| }, |
| { |
| "epoch": 2.1314837153196624, |
| "grad_norm": 0.2191238701653833, |
| "learning_rate": 1.60857908847185e-05, |
| "loss": 0.2732, |
| "step": 1767 |
| }, |
| { |
| "epoch": 2.132689987937274, |
| "grad_norm": 0.25437125752546474, |
| "learning_rate": 1.606344950848972e-05, |
| "loss": 0.2967, |
| "step": 1768 |
| }, |
| { |
| "epoch": 2.1338962605548852, |
| "grad_norm": 0.26624909826119847, |
| "learning_rate": 1.604110813226095e-05, |
| "loss": 0.2803, |
| "step": 1769 |
| }, |
| { |
| "epoch": 2.135102533172497, |
| "grad_norm": 0.24857107954142815, |
| "learning_rate": 1.6018766756032173e-05, |
| "loss": 0.2844, |
| "step": 1770 |
| }, |
| { |
| "epoch": 2.1363088057901085, |
| "grad_norm": 0.25654303344242035, |
| "learning_rate": 1.5996425379803397e-05, |
| "loss": 0.2763, |
| "step": 1771 |
| }, |
| { |
| "epoch": 2.13751507840772, |
| "grad_norm": 0.2725659911600105, |
| "learning_rate": 1.597408400357462e-05, |
| "loss": 0.2796, |
| "step": 1772 |
| }, |
| { |
| "epoch": 2.138721351025332, |
| "grad_norm": 0.2657062977643569, |
| "learning_rate": 1.5951742627345845e-05, |
| "loss": 0.2998, |
| "step": 1773 |
| }, |
| { |
| "epoch": 2.139927623642943, |
| "grad_norm": 0.24246975530355397, |
| "learning_rate": 1.5929401251117073e-05, |
| "loss": 0.2967, |
| "step": 1774 |
| }, |
| { |
| "epoch": 2.141133896260555, |
| "grad_norm": 0.24199935788003604, |
| "learning_rate": 1.5907059874888293e-05, |
| "loss": 0.2833, |
| "step": 1775 |
| }, |
| { |
| "epoch": 2.1423401688781665, |
| "grad_norm": 0.2670641237942198, |
| "learning_rate": 1.5884718498659517e-05, |
| "loss": 0.2856, |
| "step": 1776 |
| }, |
| { |
| "epoch": 2.143546441495778, |
| "grad_norm": 0.23890355072402358, |
| "learning_rate": 1.5862377122430745e-05, |
| "loss": 0.2728, |
| "step": 1777 |
| }, |
| { |
| "epoch": 2.1447527141133897, |
| "grad_norm": 0.23673195464701202, |
| "learning_rate": 1.5840035746201966e-05, |
| "loss": 0.3005, |
| "step": 1778 |
| }, |
| { |
| "epoch": 2.145958986731001, |
| "grad_norm": 0.2865067160256694, |
| "learning_rate": 1.581769436997319e-05, |
| "loss": 0.2959, |
| "step": 1779 |
| }, |
| { |
| "epoch": 2.147165259348613, |
| "grad_norm": 0.27302383294132143, |
| "learning_rate": 1.5795352993744417e-05, |
| "loss": 0.3018, |
| "step": 1780 |
| }, |
| { |
| "epoch": 2.1483715319662244, |
| "grad_norm": 0.23492163997305548, |
| "learning_rate": 1.577301161751564e-05, |
| "loss": 0.2848, |
| "step": 1781 |
| }, |
| { |
| "epoch": 2.149577804583836, |
| "grad_norm": 0.27129742825552006, |
| "learning_rate": 1.5750670241286862e-05, |
| "loss": 0.2958, |
| "step": 1782 |
| }, |
| { |
| "epoch": 2.1507840772014477, |
| "grad_norm": 0.2684880578576593, |
| "learning_rate": 1.572832886505809e-05, |
| "loss": 0.2903, |
| "step": 1783 |
| }, |
| { |
| "epoch": 2.151990349819059, |
| "grad_norm": 0.27404258961414674, |
| "learning_rate": 1.5705987488829313e-05, |
| "loss": 0.2881, |
| "step": 1784 |
| }, |
| { |
| "epoch": 2.1531966224366705, |
| "grad_norm": 0.2632922456415148, |
| "learning_rate": 1.5683646112600538e-05, |
| "loss": 0.3016, |
| "step": 1785 |
| }, |
| { |
| "epoch": 2.1544028950542824, |
| "grad_norm": 0.26663256503427807, |
| "learning_rate": 1.566130473637176e-05, |
| "loss": 0.2963, |
| "step": 1786 |
| }, |
| { |
| "epoch": 2.155609167671894, |
| "grad_norm": 0.2508496833033383, |
| "learning_rate": 1.5638963360142986e-05, |
| "loss": 0.2801, |
| "step": 1787 |
| }, |
| { |
| "epoch": 2.1568154402895052, |
| "grad_norm": 0.22978346023328622, |
| "learning_rate": 1.561662198391421e-05, |
| "loss": 0.2754, |
| "step": 1788 |
| }, |
| { |
| "epoch": 2.158021712907117, |
| "grad_norm": 0.2679042839697001, |
| "learning_rate": 1.5594280607685434e-05, |
| "loss": 0.2995, |
| "step": 1789 |
| }, |
| { |
| "epoch": 2.1592279855247285, |
| "grad_norm": 0.2690040951898322, |
| "learning_rate": 1.5571939231456658e-05, |
| "loss": 0.3033, |
| "step": 1790 |
| }, |
| { |
| "epoch": 2.1604342581423404, |
| "grad_norm": 0.2319672822255795, |
| "learning_rate": 1.5549597855227882e-05, |
| "loss": 0.2825, |
| "step": 1791 |
| }, |
| { |
| "epoch": 2.1616405307599518, |
| "grad_norm": 0.2794357797508637, |
| "learning_rate": 1.552725647899911e-05, |
| "loss": 0.2911, |
| "step": 1792 |
| }, |
| { |
| "epoch": 2.162846803377563, |
| "grad_norm": 0.21909695211407065, |
| "learning_rate": 1.550491510277033e-05, |
| "loss": 0.2659, |
| "step": 1793 |
| }, |
| { |
| "epoch": 2.164053075995175, |
| "grad_norm": 0.29375511141007266, |
| "learning_rate": 1.5482573726541554e-05, |
| "loss": 0.3114, |
| "step": 1794 |
| }, |
| { |
| "epoch": 2.1652593486127865, |
| "grad_norm": 0.2529271568454275, |
| "learning_rate": 1.5460232350312782e-05, |
| "loss": 0.2793, |
| "step": 1795 |
| }, |
| { |
| "epoch": 2.166465621230398, |
| "grad_norm": 0.24510286216006683, |
| "learning_rate": 1.5437890974084006e-05, |
| "loss": 0.2938, |
| "step": 1796 |
| }, |
| { |
| "epoch": 2.1676718938480097, |
| "grad_norm": 0.24633271965834536, |
| "learning_rate": 1.5415549597855227e-05, |
| "loss": 0.2936, |
| "step": 1797 |
| }, |
| { |
| "epoch": 2.168878166465621, |
| "grad_norm": 0.2908065624489396, |
| "learning_rate": 1.5393208221626454e-05, |
| "loss": 0.2943, |
| "step": 1798 |
| }, |
| { |
| "epoch": 2.170084439083233, |
| "grad_norm": 0.2749611947114117, |
| "learning_rate": 1.5370866845397678e-05, |
| "loss": 0.2871, |
| "step": 1799 |
| }, |
| { |
| "epoch": 2.1712907117008444, |
| "grad_norm": 0.2231876084061707, |
| "learning_rate": 1.5348525469168902e-05, |
| "loss": 0.2904, |
| "step": 1800 |
| }, |
| { |
| "epoch": 2.172496984318456, |
| "grad_norm": 0.22679976460521523, |
| "learning_rate": 1.5326184092940126e-05, |
| "loss": 0.294, |
| "step": 1801 |
| }, |
| { |
| "epoch": 2.1737032569360677, |
| "grad_norm": 0.24525986813590006, |
| "learning_rate": 1.530384271671135e-05, |
| "loss": 0.3015, |
| "step": 1802 |
| }, |
| { |
| "epoch": 2.174909529553679, |
| "grad_norm": 0.22623155707285916, |
| "learning_rate": 1.5281501340482574e-05, |
| "loss": 0.2772, |
| "step": 1803 |
| }, |
| { |
| "epoch": 2.1761158021712905, |
| "grad_norm": 0.23398318830707168, |
| "learning_rate": 1.52591599642538e-05, |
| "loss": 0.284, |
| "step": 1804 |
| }, |
| { |
| "epoch": 2.1773220747889024, |
| "grad_norm": 0.23119064864869804, |
| "learning_rate": 1.5236818588025023e-05, |
| "loss": 0.281, |
| "step": 1805 |
| }, |
| { |
| "epoch": 2.178528347406514, |
| "grad_norm": 0.22917560469727433, |
| "learning_rate": 1.5214477211796247e-05, |
| "loss": 0.2786, |
| "step": 1806 |
| }, |
| { |
| "epoch": 2.1797346200241257, |
| "grad_norm": 0.23631284216344356, |
| "learning_rate": 1.5192135835567473e-05, |
| "loss": 0.2713, |
| "step": 1807 |
| }, |
| { |
| "epoch": 2.180940892641737, |
| "grad_norm": 0.2488903695574401, |
| "learning_rate": 1.5169794459338697e-05, |
| "loss": 0.2774, |
| "step": 1808 |
| }, |
| { |
| "epoch": 2.1821471652593485, |
| "grad_norm": 0.2155531209044899, |
| "learning_rate": 1.5147453083109919e-05, |
| "loss": 0.2966, |
| "step": 1809 |
| }, |
| { |
| "epoch": 2.1833534378769603, |
| "grad_norm": 0.23495355035903273, |
| "learning_rate": 1.5125111706881145e-05, |
| "loss": 0.3087, |
| "step": 1810 |
| }, |
| { |
| "epoch": 2.1845597104945718, |
| "grad_norm": 0.234213301291648, |
| "learning_rate": 1.5102770330652369e-05, |
| "loss": 0.2762, |
| "step": 1811 |
| }, |
| { |
| "epoch": 2.185765983112183, |
| "grad_norm": 0.2778933084532547, |
| "learning_rate": 1.5080428954423593e-05, |
| "loss": 0.2851, |
| "step": 1812 |
| }, |
| { |
| "epoch": 2.186972255729795, |
| "grad_norm": 0.24101135672543364, |
| "learning_rate": 1.5058087578194819e-05, |
| "loss": 0.2793, |
| "step": 1813 |
| }, |
| { |
| "epoch": 2.1881785283474064, |
| "grad_norm": 0.2510040494621872, |
| "learning_rate": 1.5035746201966041e-05, |
| "loss": 0.2994, |
| "step": 1814 |
| }, |
| { |
| "epoch": 2.1893848009650183, |
| "grad_norm": 0.2128063834439931, |
| "learning_rate": 1.5013404825737265e-05, |
| "loss": 0.2838, |
| "step": 1815 |
| }, |
| { |
| "epoch": 2.1905910735826297, |
| "grad_norm": 0.24754013889868678, |
| "learning_rate": 1.4991063449508491e-05, |
| "loss": 0.3053, |
| "step": 1816 |
| }, |
| { |
| "epoch": 2.191797346200241, |
| "grad_norm": 0.2581031353095379, |
| "learning_rate": 1.4968722073279715e-05, |
| "loss": 0.288, |
| "step": 1817 |
| }, |
| { |
| "epoch": 2.193003618817853, |
| "grad_norm": 0.23487250557642658, |
| "learning_rate": 1.4946380697050938e-05, |
| "loss": 0.2986, |
| "step": 1818 |
| }, |
| { |
| "epoch": 2.1942098914354644, |
| "grad_norm": 0.20696922559019662, |
| "learning_rate": 1.4924039320822163e-05, |
| "loss": 0.2813, |
| "step": 1819 |
| }, |
| { |
| "epoch": 2.195416164053076, |
| "grad_norm": 0.220731832587104, |
| "learning_rate": 1.4901697944593387e-05, |
| "loss": 0.2748, |
| "step": 1820 |
| }, |
| { |
| "epoch": 2.1966224366706877, |
| "grad_norm": 0.25166555395330575, |
| "learning_rate": 1.4879356568364611e-05, |
| "loss": 0.285, |
| "step": 1821 |
| }, |
| { |
| "epoch": 2.197828709288299, |
| "grad_norm": 0.23349864934060624, |
| "learning_rate": 1.4857015192135837e-05, |
| "loss": 0.2786, |
| "step": 1822 |
| }, |
| { |
| "epoch": 2.1990349819059105, |
| "grad_norm": 0.2352295275890298, |
| "learning_rate": 1.483467381590706e-05, |
| "loss": 0.2901, |
| "step": 1823 |
| }, |
| { |
| "epoch": 2.2002412545235224, |
| "grad_norm": 0.24218185360551212, |
| "learning_rate": 1.4812332439678284e-05, |
| "loss": 0.269, |
| "step": 1824 |
| }, |
| { |
| "epoch": 2.201447527141134, |
| "grad_norm": 0.23597928948583063, |
| "learning_rate": 1.478999106344951e-05, |
| "loss": 0.2852, |
| "step": 1825 |
| }, |
| { |
| "epoch": 2.2026537997587456, |
| "grad_norm": 0.20882073193696107, |
| "learning_rate": 1.4767649687220734e-05, |
| "loss": 0.2703, |
| "step": 1826 |
| }, |
| { |
| "epoch": 2.203860072376357, |
| "grad_norm": 0.24223129133976676, |
| "learning_rate": 1.4745308310991956e-05, |
| "loss": 0.2921, |
| "step": 1827 |
| }, |
| { |
| "epoch": 2.2050663449939685, |
| "grad_norm": 0.246628534539299, |
| "learning_rate": 1.4722966934763183e-05, |
| "loss": 0.2917, |
| "step": 1828 |
| }, |
| { |
| "epoch": 2.2062726176115803, |
| "grad_norm": 0.2302693025242607, |
| "learning_rate": 1.4700625558534406e-05, |
| "loss": 0.2709, |
| "step": 1829 |
| }, |
| { |
| "epoch": 2.2074788902291917, |
| "grad_norm": 0.224038143545482, |
| "learning_rate": 1.467828418230563e-05, |
| "loss": 0.2851, |
| "step": 1830 |
| }, |
| { |
| "epoch": 2.2086851628468036, |
| "grad_norm": 0.23867623831655044, |
| "learning_rate": 1.4655942806076856e-05, |
| "loss": 0.282, |
| "step": 1831 |
| }, |
| { |
| "epoch": 2.209891435464415, |
| "grad_norm": 0.23595428041685104, |
| "learning_rate": 1.4633601429848078e-05, |
| "loss": 0.279, |
| "step": 1832 |
| }, |
| { |
| "epoch": 2.2110977080820264, |
| "grad_norm": 0.2397339096346714, |
| "learning_rate": 1.4611260053619302e-05, |
| "loss": 0.2938, |
| "step": 1833 |
| }, |
| { |
| "epoch": 2.2123039806996383, |
| "grad_norm": 0.25086601174251194, |
| "learning_rate": 1.4588918677390528e-05, |
| "loss": 0.2881, |
| "step": 1834 |
| }, |
| { |
| "epoch": 2.2135102533172497, |
| "grad_norm": 0.2979190537928123, |
| "learning_rate": 1.4566577301161752e-05, |
| "loss": 0.3057, |
| "step": 1835 |
| }, |
| { |
| "epoch": 2.214716525934861, |
| "grad_norm": 0.23603851376001234, |
| "learning_rate": 1.4544235924932978e-05, |
| "loss": 0.2907, |
| "step": 1836 |
| }, |
| { |
| "epoch": 2.215922798552473, |
| "grad_norm": 0.22529168222023993, |
| "learning_rate": 1.4521894548704202e-05, |
| "loss": 0.2992, |
| "step": 1837 |
| }, |
| { |
| "epoch": 2.2171290711700844, |
| "grad_norm": 0.22362154450561503, |
| "learning_rate": 1.4499553172475424e-05, |
| "loss": 0.2694, |
| "step": 1838 |
| }, |
| { |
| "epoch": 2.218335343787696, |
| "grad_norm": 0.22302860615250908, |
| "learning_rate": 1.447721179624665e-05, |
| "loss": 0.284, |
| "step": 1839 |
| }, |
| { |
| "epoch": 2.2195416164053077, |
| "grad_norm": 0.22904183729570707, |
| "learning_rate": 1.4454870420017874e-05, |
| "loss": 0.2795, |
| "step": 1840 |
| }, |
| { |
| "epoch": 2.220747889022919, |
| "grad_norm": 0.26782453766381153, |
| "learning_rate": 1.4432529043789098e-05, |
| "loss": 0.2929, |
| "step": 1841 |
| }, |
| { |
| "epoch": 2.221954161640531, |
| "grad_norm": 0.21714316481031462, |
| "learning_rate": 1.4410187667560324e-05, |
| "loss": 0.2731, |
| "step": 1842 |
| }, |
| { |
| "epoch": 2.2231604342581424, |
| "grad_norm": 0.26186668193353185, |
| "learning_rate": 1.4387846291331546e-05, |
| "loss": 0.2839, |
| "step": 1843 |
| }, |
| { |
| "epoch": 2.2243667068757538, |
| "grad_norm": 0.2911995525000822, |
| "learning_rate": 1.436550491510277e-05, |
| "loss": 0.2972, |
| "step": 1844 |
| }, |
| { |
| "epoch": 2.2255729794933656, |
| "grad_norm": 0.24276375836112515, |
| "learning_rate": 1.4343163538873996e-05, |
| "loss": 0.2914, |
| "step": 1845 |
| }, |
| { |
| "epoch": 2.226779252110977, |
| "grad_norm": 0.24600706178312343, |
| "learning_rate": 1.432082216264522e-05, |
| "loss": 0.2945, |
| "step": 1846 |
| }, |
| { |
| "epoch": 2.2279855247285885, |
| "grad_norm": 0.22011966635882427, |
| "learning_rate": 1.4298480786416443e-05, |
| "loss": 0.2705, |
| "step": 1847 |
| }, |
| { |
| "epoch": 2.2291917973462003, |
| "grad_norm": 0.22274947815980448, |
| "learning_rate": 1.4276139410187669e-05, |
| "loss": 0.2858, |
| "step": 1848 |
| }, |
| { |
| "epoch": 2.2303980699638117, |
| "grad_norm": 0.24312414836648066, |
| "learning_rate": 1.4253798033958893e-05, |
| "loss": 0.275, |
| "step": 1849 |
| }, |
| { |
| "epoch": 2.2316043425814236, |
| "grad_norm": 0.25667716385389866, |
| "learning_rate": 1.4231456657730117e-05, |
| "loss": 0.2971, |
| "step": 1850 |
| }, |
| { |
| "epoch": 2.232810615199035, |
| "grad_norm": 0.23198803574642948, |
| "learning_rate": 1.4209115281501343e-05, |
| "loss": 0.2849, |
| "step": 1851 |
| }, |
| { |
| "epoch": 2.2340168878166464, |
| "grad_norm": 0.2401993446695254, |
| "learning_rate": 1.4186773905272565e-05, |
| "loss": 0.2916, |
| "step": 1852 |
| }, |
| { |
| "epoch": 2.2352231604342583, |
| "grad_norm": 0.2510433024863049, |
| "learning_rate": 1.4164432529043789e-05, |
| "loss": 0.2735, |
| "step": 1853 |
| }, |
| { |
| "epoch": 2.2364294330518697, |
| "grad_norm": 0.2404864962858689, |
| "learning_rate": 1.4142091152815015e-05, |
| "loss": 0.2892, |
| "step": 1854 |
| }, |
| { |
| "epoch": 2.237635705669481, |
| "grad_norm": 0.22016088446747195, |
| "learning_rate": 1.4119749776586239e-05, |
| "loss": 0.2798, |
| "step": 1855 |
| }, |
| { |
| "epoch": 2.238841978287093, |
| "grad_norm": 0.24403858808762297, |
| "learning_rate": 1.4097408400357461e-05, |
| "loss": 0.2967, |
| "step": 1856 |
| }, |
| { |
| "epoch": 2.2400482509047044, |
| "grad_norm": 0.23489950259808423, |
| "learning_rate": 1.4075067024128689e-05, |
| "loss": 0.2788, |
| "step": 1857 |
| }, |
| { |
| "epoch": 2.2412545235223162, |
| "grad_norm": 0.25532204034310485, |
| "learning_rate": 1.4052725647899911e-05, |
| "loss": 0.295, |
| "step": 1858 |
| }, |
| { |
| "epoch": 2.2424607961399277, |
| "grad_norm": 0.251973767975049, |
| "learning_rate": 1.4030384271671135e-05, |
| "loss": 0.3009, |
| "step": 1859 |
| }, |
| { |
| "epoch": 2.243667068757539, |
| "grad_norm": 0.217744929867661, |
| "learning_rate": 1.4008042895442361e-05, |
| "loss": 0.292, |
| "step": 1860 |
| }, |
| { |
| "epoch": 2.244873341375151, |
| "grad_norm": 0.20972929540640886, |
| "learning_rate": 1.3985701519213583e-05, |
| "loss": 0.2753, |
| "step": 1861 |
| }, |
| { |
| "epoch": 2.2460796139927623, |
| "grad_norm": 0.2593676734049886, |
| "learning_rate": 1.3963360142984808e-05, |
| "loss": 0.2945, |
| "step": 1862 |
| }, |
| { |
| "epoch": 2.2472858866103738, |
| "grad_norm": 0.2407026323892429, |
| "learning_rate": 1.3941018766756033e-05, |
| "loss": 0.2921, |
| "step": 1863 |
| }, |
| { |
| "epoch": 2.2484921592279856, |
| "grad_norm": 0.21792380789916738, |
| "learning_rate": 1.3918677390527257e-05, |
| "loss": 0.2968, |
| "step": 1864 |
| }, |
| { |
| "epoch": 2.249698431845597, |
| "grad_norm": 0.2294246384984617, |
| "learning_rate": 1.389633601429848e-05, |
| "loss": 0.2894, |
| "step": 1865 |
| }, |
| { |
| "epoch": 2.250904704463209, |
| "grad_norm": 0.2352525773133271, |
| "learning_rate": 1.3873994638069707e-05, |
| "loss": 0.2801, |
| "step": 1866 |
| }, |
| { |
| "epoch": 2.2521109770808203, |
| "grad_norm": 0.2261284819996082, |
| "learning_rate": 1.385165326184093e-05, |
| "loss": 0.2672, |
| "step": 1867 |
| }, |
| { |
| "epoch": 2.2533172496984317, |
| "grad_norm": 0.23872374706253088, |
| "learning_rate": 1.3829311885612154e-05, |
| "loss": 0.2956, |
| "step": 1868 |
| }, |
| { |
| "epoch": 2.2545235223160436, |
| "grad_norm": 0.23826064709676967, |
| "learning_rate": 1.380697050938338e-05, |
| "loss": 0.284, |
| "step": 1869 |
| }, |
| { |
| "epoch": 2.255729794933655, |
| "grad_norm": 0.2294043053362506, |
| "learning_rate": 1.3784629133154604e-05, |
| "loss": 0.2819, |
| "step": 1870 |
| }, |
| { |
| "epoch": 2.2569360675512664, |
| "grad_norm": 0.23258701478035285, |
| "learning_rate": 1.3762287756925826e-05, |
| "loss": 0.2697, |
| "step": 1871 |
| }, |
| { |
| "epoch": 2.2581423401688783, |
| "grad_norm": 0.204987775131546, |
| "learning_rate": 1.3739946380697052e-05, |
| "loss": 0.2875, |
| "step": 1872 |
| }, |
| { |
| "epoch": 2.2593486127864897, |
| "grad_norm": 0.24311420445354, |
| "learning_rate": 1.3717605004468276e-05, |
| "loss": 0.2797, |
| "step": 1873 |
| }, |
| { |
| "epoch": 2.260554885404101, |
| "grad_norm": 0.2706634799528978, |
| "learning_rate": 1.3695263628239498e-05, |
| "loss": 0.3022, |
| "step": 1874 |
| }, |
| { |
| "epoch": 2.261761158021713, |
| "grad_norm": 0.23301429893513645, |
| "learning_rate": 1.3672922252010726e-05, |
| "loss": 0.2736, |
| "step": 1875 |
| }, |
| { |
| "epoch": 2.2629674306393244, |
| "grad_norm": 0.2227015979945295, |
| "learning_rate": 1.3650580875781948e-05, |
| "loss": 0.2892, |
| "step": 1876 |
| }, |
| { |
| "epoch": 2.2641737032569362, |
| "grad_norm": 0.2518005952205528, |
| "learning_rate": 1.3628239499553172e-05, |
| "loss": 0.289, |
| "step": 1877 |
| }, |
| { |
| "epoch": 2.2653799758745476, |
| "grad_norm": 0.22770033941273815, |
| "learning_rate": 1.3605898123324398e-05, |
| "loss": 0.2707, |
| "step": 1878 |
| }, |
| { |
| "epoch": 2.266586248492159, |
| "grad_norm": 0.25592147280730076, |
| "learning_rate": 1.3583556747095622e-05, |
| "loss": 0.2984, |
| "step": 1879 |
| }, |
| { |
| "epoch": 2.267792521109771, |
| "grad_norm": 0.24678852398410292, |
| "learning_rate": 1.3561215370866845e-05, |
| "loss": 0.2893, |
| "step": 1880 |
| }, |
| { |
| "epoch": 2.2689987937273823, |
| "grad_norm": 0.21640104205658625, |
| "learning_rate": 1.353887399463807e-05, |
| "loss": 0.284, |
| "step": 1881 |
| }, |
| { |
| "epoch": 2.270205066344994, |
| "grad_norm": 0.22263483357092576, |
| "learning_rate": 1.3516532618409294e-05, |
| "loss": 0.2756, |
| "step": 1882 |
| }, |
| { |
| "epoch": 2.2714113389626056, |
| "grad_norm": 0.22549139263598314, |
| "learning_rate": 1.3494191242180519e-05, |
| "loss": 0.2804, |
| "step": 1883 |
| }, |
| { |
| "epoch": 2.272617611580217, |
| "grad_norm": 0.22848000714044425, |
| "learning_rate": 1.3471849865951744e-05, |
| "loss": 0.2943, |
| "step": 1884 |
| }, |
| { |
| "epoch": 2.273823884197829, |
| "grad_norm": 0.22965966451255318, |
| "learning_rate": 1.3449508489722967e-05, |
| "loss": 0.2939, |
| "step": 1885 |
| }, |
| { |
| "epoch": 2.2750301568154403, |
| "grad_norm": 0.22002800402535225, |
| "learning_rate": 1.342716711349419e-05, |
| "loss": 0.2708, |
| "step": 1886 |
| }, |
| { |
| "epoch": 2.2762364294330517, |
| "grad_norm": 0.22824703002205038, |
| "learning_rate": 1.3404825737265417e-05, |
| "loss": 0.2959, |
| "step": 1887 |
| }, |
| { |
| "epoch": 2.2774427020506636, |
| "grad_norm": 0.2175045283912371, |
| "learning_rate": 1.338248436103664e-05, |
| "loss": 0.2956, |
| "step": 1888 |
| }, |
| { |
| "epoch": 2.278648974668275, |
| "grad_norm": 0.2324410570763942, |
| "learning_rate": 1.3360142984807863e-05, |
| "loss": 0.2933, |
| "step": 1889 |
| }, |
| { |
| "epoch": 2.2798552472858864, |
| "grad_norm": 0.2291968726319126, |
| "learning_rate": 1.333780160857909e-05, |
| "loss": 0.289, |
| "step": 1890 |
| }, |
| { |
| "epoch": 2.2810615199034983, |
| "grad_norm": 0.23648168240852283, |
| "learning_rate": 1.3315460232350313e-05, |
| "loss": 0.3109, |
| "step": 1891 |
| }, |
| { |
| "epoch": 2.2822677925211097, |
| "grad_norm": 0.2231262318277235, |
| "learning_rate": 1.3293118856121537e-05, |
| "loss": 0.2835, |
| "step": 1892 |
| }, |
| { |
| "epoch": 2.2834740651387215, |
| "grad_norm": 0.23818818699053612, |
| "learning_rate": 1.3270777479892763e-05, |
| "loss": 0.2831, |
| "step": 1893 |
| }, |
| { |
| "epoch": 2.284680337756333, |
| "grad_norm": 0.2434904932138937, |
| "learning_rate": 1.3248436103663985e-05, |
| "loss": 0.294, |
| "step": 1894 |
| }, |
| { |
| "epoch": 2.2858866103739444, |
| "grad_norm": 0.21286990891188562, |
| "learning_rate": 1.322609472743521e-05, |
| "loss": 0.2756, |
| "step": 1895 |
| }, |
| { |
| "epoch": 2.287092882991556, |
| "grad_norm": 0.23610309357095516, |
| "learning_rate": 1.3203753351206435e-05, |
| "loss": 0.2905, |
| "step": 1896 |
| }, |
| { |
| "epoch": 2.2882991556091676, |
| "grad_norm": 0.240924967821922, |
| "learning_rate": 1.3181411974977659e-05, |
| "loss": 0.2721, |
| "step": 1897 |
| }, |
| { |
| "epoch": 2.2895054282267795, |
| "grad_norm": 0.2555889113252634, |
| "learning_rate": 1.3159070598748885e-05, |
| "loss": 0.3004, |
| "step": 1898 |
| }, |
| { |
| "epoch": 2.290711700844391, |
| "grad_norm": 0.2374596441116695, |
| "learning_rate": 1.3136729222520109e-05, |
| "loss": 0.276, |
| "step": 1899 |
| }, |
| { |
| "epoch": 2.2919179734620023, |
| "grad_norm": 0.22527696891452795, |
| "learning_rate": 1.3114387846291331e-05, |
| "loss": 0.2762, |
| "step": 1900 |
| }, |
| { |
| "epoch": 2.293124246079614, |
| "grad_norm": 0.25567802002526824, |
| "learning_rate": 1.3092046470062557e-05, |
| "loss": 0.2813, |
| "step": 1901 |
| }, |
| { |
| "epoch": 2.2943305186972256, |
| "grad_norm": 0.21906439644766362, |
| "learning_rate": 1.3069705093833781e-05, |
| "loss": 0.2875, |
| "step": 1902 |
| }, |
| { |
| "epoch": 2.295536791314837, |
| "grad_norm": 0.22064470665788954, |
| "learning_rate": 1.3047363717605005e-05, |
| "loss": 0.2857, |
| "step": 1903 |
| }, |
| { |
| "epoch": 2.296743063932449, |
| "grad_norm": 0.23151609094727654, |
| "learning_rate": 1.3025022341376231e-05, |
| "loss": 0.2842, |
| "step": 1904 |
| }, |
| { |
| "epoch": 2.2979493365500603, |
| "grad_norm": 0.2351690748078775, |
| "learning_rate": 1.3002680965147454e-05, |
| "loss": 0.2769, |
| "step": 1905 |
| }, |
| { |
| "epoch": 2.2991556091676717, |
| "grad_norm": 0.23004031471475894, |
| "learning_rate": 1.2980339588918678e-05, |
| "loss": 0.2915, |
| "step": 1906 |
| }, |
| { |
| "epoch": 2.3003618817852836, |
| "grad_norm": 0.21332903284760457, |
| "learning_rate": 1.2957998212689903e-05, |
| "loss": 0.2695, |
| "step": 1907 |
| }, |
| { |
| "epoch": 2.301568154402895, |
| "grad_norm": 0.2536337679173862, |
| "learning_rate": 1.2935656836461127e-05, |
| "loss": 0.2854, |
| "step": 1908 |
| }, |
| { |
| "epoch": 2.3027744270205064, |
| "grad_norm": 0.26978055805109497, |
| "learning_rate": 1.291331546023235e-05, |
| "loss": 0.2822, |
| "step": 1909 |
| }, |
| { |
| "epoch": 2.3039806996381182, |
| "grad_norm": 0.21833218367797522, |
| "learning_rate": 1.2890974084003576e-05, |
| "loss": 0.2751, |
| "step": 1910 |
| }, |
| { |
| "epoch": 2.3051869722557297, |
| "grad_norm": 0.213236596889352, |
| "learning_rate": 1.28686327077748e-05, |
| "loss": 0.2767, |
| "step": 1911 |
| }, |
| { |
| "epoch": 2.3063932448733415, |
| "grad_norm": 0.25023452493910114, |
| "learning_rate": 1.2846291331546024e-05, |
| "loss": 0.2727, |
| "step": 1912 |
| }, |
| { |
| "epoch": 2.307599517490953, |
| "grad_norm": 0.25357039532276837, |
| "learning_rate": 1.282394995531725e-05, |
| "loss": 0.2887, |
| "step": 1913 |
| }, |
| { |
| "epoch": 2.3088057901085643, |
| "grad_norm": 0.22829308034251444, |
| "learning_rate": 1.2801608579088472e-05, |
| "loss": 0.2766, |
| "step": 1914 |
| }, |
| { |
| "epoch": 2.310012062726176, |
| "grad_norm": 0.22565008404788817, |
| "learning_rate": 1.2779267202859696e-05, |
| "loss": 0.299, |
| "step": 1915 |
| }, |
| { |
| "epoch": 2.3112183353437876, |
| "grad_norm": 0.23456861896586176, |
| "learning_rate": 1.2756925826630922e-05, |
| "loss": 0.2747, |
| "step": 1916 |
| }, |
| { |
| "epoch": 2.3124246079613995, |
| "grad_norm": 0.23597661508111467, |
| "learning_rate": 1.2734584450402146e-05, |
| "loss": 0.2894, |
| "step": 1917 |
| }, |
| { |
| "epoch": 2.313630880579011, |
| "grad_norm": 0.23786461250041235, |
| "learning_rate": 1.2712243074173368e-05, |
| "loss": 0.2907, |
| "step": 1918 |
| }, |
| { |
| "epoch": 2.3148371531966223, |
| "grad_norm": 0.2534049640911676, |
| "learning_rate": 1.2689901697944596e-05, |
| "loss": 0.287, |
| "step": 1919 |
| }, |
| { |
| "epoch": 2.316043425814234, |
| "grad_norm": 0.22090587312904558, |
| "learning_rate": 1.2667560321715818e-05, |
| "loss": 0.2789, |
| "step": 1920 |
| }, |
| { |
| "epoch": 2.3172496984318456, |
| "grad_norm": 0.235549531483714, |
| "learning_rate": 1.2645218945487042e-05, |
| "loss": 0.286, |
| "step": 1921 |
| }, |
| { |
| "epoch": 2.318455971049457, |
| "grad_norm": 0.25142631802715215, |
| "learning_rate": 1.2622877569258268e-05, |
| "loss": 0.2846, |
| "step": 1922 |
| }, |
| { |
| "epoch": 2.319662243667069, |
| "grad_norm": 0.2524115813131248, |
| "learning_rate": 1.260053619302949e-05, |
| "loss": 0.2877, |
| "step": 1923 |
| }, |
| { |
| "epoch": 2.3208685162846803, |
| "grad_norm": 0.24100570731736226, |
| "learning_rate": 1.2578194816800715e-05, |
| "loss": 0.2905, |
| "step": 1924 |
| }, |
| { |
| "epoch": 2.3220747889022917, |
| "grad_norm": 0.23658962005375492, |
| "learning_rate": 1.255585344057194e-05, |
| "loss": 0.3116, |
| "step": 1925 |
| }, |
| { |
| "epoch": 2.3232810615199035, |
| "grad_norm": 0.2366915639739183, |
| "learning_rate": 1.2533512064343164e-05, |
| "loss": 0.2653, |
| "step": 1926 |
| }, |
| { |
| "epoch": 2.324487334137515, |
| "grad_norm": 0.24126309010219563, |
| "learning_rate": 1.2511170688114387e-05, |
| "loss": 0.2944, |
| "step": 1927 |
| }, |
| { |
| "epoch": 2.325693606755127, |
| "grad_norm": 0.256417259208843, |
| "learning_rate": 1.2488829311885613e-05, |
| "loss": 0.299, |
| "step": 1928 |
| }, |
| { |
| "epoch": 2.3268998793727382, |
| "grad_norm": 0.2533720151680546, |
| "learning_rate": 1.2466487935656837e-05, |
| "loss": 0.288, |
| "step": 1929 |
| }, |
| { |
| "epoch": 2.3281061519903496, |
| "grad_norm": 0.24913672554051416, |
| "learning_rate": 1.2444146559428063e-05, |
| "loss": 0.3067, |
| "step": 1930 |
| }, |
| { |
| "epoch": 2.3293124246079615, |
| "grad_norm": 0.24341114503544165, |
| "learning_rate": 1.2421805183199285e-05, |
| "loss": 0.2763, |
| "step": 1931 |
| }, |
| { |
| "epoch": 2.330518697225573, |
| "grad_norm": 0.2547834249597416, |
| "learning_rate": 1.239946380697051e-05, |
| "loss": 0.2936, |
| "step": 1932 |
| }, |
| { |
| "epoch": 2.331724969843185, |
| "grad_norm": 0.23300171268866948, |
| "learning_rate": 1.2377122430741735e-05, |
| "loss": 0.2934, |
| "step": 1933 |
| }, |
| { |
| "epoch": 2.332931242460796, |
| "grad_norm": 0.24474857033348835, |
| "learning_rate": 1.2354781054512959e-05, |
| "loss": 0.2887, |
| "step": 1934 |
| }, |
| { |
| "epoch": 2.3341375150784076, |
| "grad_norm": 0.2151206611437992, |
| "learning_rate": 1.2332439678284183e-05, |
| "loss": 0.2836, |
| "step": 1935 |
| }, |
| { |
| "epoch": 2.3353437876960195, |
| "grad_norm": 0.24359081217137948, |
| "learning_rate": 1.2310098302055407e-05, |
| "loss": 0.2987, |
| "step": 1936 |
| }, |
| { |
| "epoch": 2.336550060313631, |
| "grad_norm": 0.2558446677582328, |
| "learning_rate": 1.2287756925826631e-05, |
| "loss": 0.2736, |
| "step": 1937 |
| }, |
| { |
| "epoch": 2.3377563329312423, |
| "grad_norm": 0.23133494031808785, |
| "learning_rate": 1.2265415549597855e-05, |
| "loss": 0.2967, |
| "step": 1938 |
| }, |
| { |
| "epoch": 2.338962605548854, |
| "grad_norm": 0.22011413912360578, |
| "learning_rate": 1.2243074173369081e-05, |
| "loss": 0.2758, |
| "step": 1939 |
| }, |
| { |
| "epoch": 2.3401688781664656, |
| "grad_norm": 0.2465890608008465, |
| "learning_rate": 1.2220732797140303e-05, |
| "loss": 0.2862, |
| "step": 1940 |
| }, |
| { |
| "epoch": 2.341375150784077, |
| "grad_norm": 0.23400609607040407, |
| "learning_rate": 1.219839142091153e-05, |
| "loss": 0.2833, |
| "step": 1941 |
| }, |
| { |
| "epoch": 2.342581423401689, |
| "grad_norm": 0.2267979596723007, |
| "learning_rate": 1.2176050044682753e-05, |
| "loss": 0.29, |
| "step": 1942 |
| }, |
| { |
| "epoch": 2.3437876960193003, |
| "grad_norm": 0.24060657688093173, |
| "learning_rate": 1.2153708668453977e-05, |
| "loss": 0.295, |
| "step": 1943 |
| }, |
| { |
| "epoch": 2.344993968636912, |
| "grad_norm": 0.2152614594026681, |
| "learning_rate": 1.2131367292225201e-05, |
| "loss": 0.275, |
| "step": 1944 |
| }, |
| { |
| "epoch": 2.3462002412545235, |
| "grad_norm": 0.21991178211682616, |
| "learning_rate": 1.2109025915996426e-05, |
| "loss": 0.2734, |
| "step": 1945 |
| }, |
| { |
| "epoch": 2.347406513872135, |
| "grad_norm": 0.2421436523612531, |
| "learning_rate": 1.2086684539767651e-05, |
| "loss": 0.2944, |
| "step": 1946 |
| }, |
| { |
| "epoch": 2.348612786489747, |
| "grad_norm": 0.21785789529396196, |
| "learning_rate": 1.2064343163538874e-05, |
| "loss": 0.2836, |
| "step": 1947 |
| }, |
| { |
| "epoch": 2.349819059107358, |
| "grad_norm": 0.2223002921132739, |
| "learning_rate": 1.20420017873101e-05, |
| "loss": 0.2927, |
| "step": 1948 |
| }, |
| { |
| "epoch": 2.35102533172497, |
| "grad_norm": 0.23810589562126233, |
| "learning_rate": 1.2019660411081324e-05, |
| "loss": 0.2884, |
| "step": 1949 |
| }, |
| { |
| "epoch": 2.3522316043425815, |
| "grad_norm": 0.23259108876620938, |
| "learning_rate": 1.1997319034852548e-05, |
| "loss": 0.2826, |
| "step": 1950 |
| }, |
| { |
| "epoch": 2.353437876960193, |
| "grad_norm": 0.22456352439209676, |
| "learning_rate": 1.1974977658623772e-05, |
| "loss": 0.3101, |
| "step": 1951 |
| }, |
| { |
| "epoch": 2.3546441495778048, |
| "grad_norm": 0.24091153223400058, |
| "learning_rate": 1.1952636282394996e-05, |
| "loss": 0.3015, |
| "step": 1952 |
| }, |
| { |
| "epoch": 2.355850422195416, |
| "grad_norm": 0.23719969534608104, |
| "learning_rate": 1.193029490616622e-05, |
| "loss": 0.2764, |
| "step": 1953 |
| }, |
| { |
| "epoch": 2.3570566948130276, |
| "grad_norm": 0.2231954319848683, |
| "learning_rate": 1.1907953529937444e-05, |
| "loss": 0.2889, |
| "step": 1954 |
| }, |
| { |
| "epoch": 2.3582629674306395, |
| "grad_norm": 0.23040423833861387, |
| "learning_rate": 1.188561215370867e-05, |
| "loss": 0.2832, |
| "step": 1955 |
| }, |
| { |
| "epoch": 2.359469240048251, |
| "grad_norm": 0.22536151153551331, |
| "learning_rate": 1.1863270777479892e-05, |
| "loss": 0.284, |
| "step": 1956 |
| }, |
| { |
| "epoch": 2.3606755126658623, |
| "grad_norm": 0.21133558876391853, |
| "learning_rate": 1.1840929401251118e-05, |
| "loss": 0.2949, |
| "step": 1957 |
| }, |
| { |
| "epoch": 2.361881785283474, |
| "grad_norm": 0.22376185910186625, |
| "learning_rate": 1.1818588025022342e-05, |
| "loss": 0.2949, |
| "step": 1958 |
| }, |
| { |
| "epoch": 2.3630880579010856, |
| "grad_norm": 0.21550391712375538, |
| "learning_rate": 1.1796246648793566e-05, |
| "loss": 0.2767, |
| "step": 1959 |
| }, |
| { |
| "epoch": 2.3642943305186974, |
| "grad_norm": 0.21187486236001407, |
| "learning_rate": 1.177390527256479e-05, |
| "loss": 0.2948, |
| "step": 1960 |
| }, |
| { |
| "epoch": 2.365500603136309, |
| "grad_norm": 0.23091281680192807, |
| "learning_rate": 1.1751563896336016e-05, |
| "loss": 0.2961, |
| "step": 1961 |
| }, |
| { |
| "epoch": 2.3667068757539202, |
| "grad_norm": 0.23769114515813178, |
| "learning_rate": 1.1729222520107238e-05, |
| "loss": 0.2665, |
| "step": 1962 |
| }, |
| { |
| "epoch": 2.367913148371532, |
| "grad_norm": 0.22891068293328487, |
| "learning_rate": 1.1706881143878464e-05, |
| "loss": 0.2807, |
| "step": 1963 |
| }, |
| { |
| "epoch": 2.3691194209891435, |
| "grad_norm": 0.24591993183344646, |
| "learning_rate": 1.1684539767649688e-05, |
| "loss": 0.2877, |
| "step": 1964 |
| }, |
| { |
| "epoch": 2.370325693606755, |
| "grad_norm": 0.25582946324967376, |
| "learning_rate": 1.166219839142091e-05, |
| "loss": 0.2923, |
| "step": 1965 |
| }, |
| { |
| "epoch": 2.371531966224367, |
| "grad_norm": 0.22913321677896634, |
| "learning_rate": 1.1639857015192136e-05, |
| "loss": 0.2889, |
| "step": 1966 |
| }, |
| { |
| "epoch": 2.372738238841978, |
| "grad_norm": 0.246974008296304, |
| "learning_rate": 1.161751563896336e-05, |
| "loss": 0.2941, |
| "step": 1967 |
| }, |
| { |
| "epoch": 2.37394451145959, |
| "grad_norm": 0.2277084018677113, |
| "learning_rate": 1.1595174262734585e-05, |
| "loss": 0.2888, |
| "step": 1968 |
| }, |
| { |
| "epoch": 2.3751507840772015, |
| "grad_norm": 0.23156695668294586, |
| "learning_rate": 1.1572832886505809e-05, |
| "loss": 0.2833, |
| "step": 1969 |
| }, |
| { |
| "epoch": 2.376357056694813, |
| "grad_norm": 0.2525648596511969, |
| "learning_rate": 1.1550491510277035e-05, |
| "loss": 0.2798, |
| "step": 1970 |
| }, |
| { |
| "epoch": 2.3775633293124248, |
| "grad_norm": 0.2205192096748547, |
| "learning_rate": 1.1528150134048257e-05, |
| "loss": 0.2825, |
| "step": 1971 |
| }, |
| { |
| "epoch": 2.378769601930036, |
| "grad_norm": 0.23904424195465934, |
| "learning_rate": 1.1505808757819483e-05, |
| "loss": 0.2951, |
| "step": 1972 |
| }, |
| { |
| "epoch": 2.3799758745476476, |
| "grad_norm": 0.2362564226232104, |
| "learning_rate": 1.1483467381590707e-05, |
| "loss": 0.2992, |
| "step": 1973 |
| }, |
| { |
| "epoch": 2.3811821471652594, |
| "grad_norm": 0.23386421917567346, |
| "learning_rate": 1.1461126005361931e-05, |
| "loss": 0.2956, |
| "step": 1974 |
| }, |
| { |
| "epoch": 2.382388419782871, |
| "grad_norm": 0.26306584218072937, |
| "learning_rate": 1.1438784629133155e-05, |
| "loss": 0.2964, |
| "step": 1975 |
| }, |
| { |
| "epoch": 2.3835946924004823, |
| "grad_norm": 0.22880228551731152, |
| "learning_rate": 1.1416443252904379e-05, |
| "loss": 0.2986, |
| "step": 1976 |
| }, |
| { |
| "epoch": 2.384800965018094, |
| "grad_norm": 0.20846641089392354, |
| "learning_rate": 1.1394101876675605e-05, |
| "loss": 0.2926, |
| "step": 1977 |
| }, |
| { |
| "epoch": 2.3860072376357055, |
| "grad_norm": 0.23144685907710733, |
| "learning_rate": 1.1371760500446827e-05, |
| "loss": 0.2813, |
| "step": 1978 |
| }, |
| { |
| "epoch": 2.3872135102533174, |
| "grad_norm": 0.22403595755031833, |
| "learning_rate": 1.1349419124218053e-05, |
| "loss": 0.2779, |
| "step": 1979 |
| }, |
| { |
| "epoch": 2.388419782870929, |
| "grad_norm": 0.23705522152129233, |
| "learning_rate": 1.1327077747989277e-05, |
| "loss": 0.2913, |
| "step": 1980 |
| }, |
| { |
| "epoch": 2.3896260554885402, |
| "grad_norm": 0.20647613466526085, |
| "learning_rate": 1.1304736371760501e-05, |
| "loss": 0.2853, |
| "step": 1981 |
| }, |
| { |
| "epoch": 2.390832328106152, |
| "grad_norm": 0.2047928540204018, |
| "learning_rate": 1.1282394995531725e-05, |
| "loss": 0.2828, |
| "step": 1982 |
| }, |
| { |
| "epoch": 2.3920386007237635, |
| "grad_norm": 0.21929372924006985, |
| "learning_rate": 1.126005361930295e-05, |
| "loss": 0.2859, |
| "step": 1983 |
| }, |
| { |
| "epoch": 2.3932448733413754, |
| "grad_norm": 0.21794959265971092, |
| "learning_rate": 1.1237712243074173e-05, |
| "loss": 0.2765, |
| "step": 1984 |
| }, |
| { |
| "epoch": 2.394451145958987, |
| "grad_norm": 0.2316415929799842, |
| "learning_rate": 1.1215370866845398e-05, |
| "loss": 0.2775, |
| "step": 1985 |
| }, |
| { |
| "epoch": 2.395657418576598, |
| "grad_norm": 0.23705174878505497, |
| "learning_rate": 1.1193029490616623e-05, |
| "loss": 0.3013, |
| "step": 1986 |
| }, |
| { |
| "epoch": 2.39686369119421, |
| "grad_norm": 0.22799762544468544, |
| "learning_rate": 1.1170688114387846e-05, |
| "loss": 0.2877, |
| "step": 1987 |
| }, |
| { |
| "epoch": 2.3980699638118215, |
| "grad_norm": 0.21882815233428807, |
| "learning_rate": 1.1148346738159071e-05, |
| "loss": 0.2845, |
| "step": 1988 |
| }, |
| { |
| "epoch": 2.399276236429433, |
| "grad_norm": 0.20344632297160425, |
| "learning_rate": 1.1126005361930296e-05, |
| "loss": 0.2833, |
| "step": 1989 |
| }, |
| { |
| "epoch": 2.4004825090470447, |
| "grad_norm": 0.23503552618475457, |
| "learning_rate": 1.110366398570152e-05, |
| "loss": 0.2872, |
| "step": 1990 |
| }, |
| { |
| "epoch": 2.401688781664656, |
| "grad_norm": 0.22374098805502607, |
| "learning_rate": 1.1081322609472744e-05, |
| "loss": 0.2855, |
| "step": 1991 |
| }, |
| { |
| "epoch": 2.4028950542822676, |
| "grad_norm": 0.21020918399892033, |
| "learning_rate": 1.105898123324397e-05, |
| "loss": 0.2876, |
| "step": 1992 |
| }, |
| { |
| "epoch": 2.4041013268998794, |
| "grad_norm": 0.23373376474000448, |
| "learning_rate": 1.1036639857015192e-05, |
| "loss": 0.307, |
| "step": 1993 |
| }, |
| { |
| "epoch": 2.405307599517491, |
| "grad_norm": 0.2104550476556823, |
| "learning_rate": 1.1014298480786418e-05, |
| "loss": 0.2896, |
| "step": 1994 |
| }, |
| { |
| "epoch": 2.4065138721351027, |
| "grad_norm": 0.20137896629238947, |
| "learning_rate": 1.0991957104557642e-05, |
| "loss": 0.2981, |
| "step": 1995 |
| }, |
| { |
| "epoch": 2.407720144752714, |
| "grad_norm": 0.23012273495698185, |
| "learning_rate": 1.0969615728328864e-05, |
| "loss": 0.2843, |
| "step": 1996 |
| }, |
| { |
| "epoch": 2.4089264173703255, |
| "grad_norm": 0.20650123033912035, |
| "learning_rate": 1.094727435210009e-05, |
| "loss": 0.2853, |
| "step": 1997 |
| }, |
| { |
| "epoch": 2.4101326899879374, |
| "grad_norm": 0.21987946438527867, |
| "learning_rate": 1.0924932975871314e-05, |
| "loss": 0.2751, |
| "step": 1998 |
| }, |
| { |
| "epoch": 2.411338962605549, |
| "grad_norm": 0.23424456071416347, |
| "learning_rate": 1.0902591599642538e-05, |
| "loss": 0.3035, |
| "step": 1999 |
| }, |
| { |
| "epoch": 2.4125452352231607, |
| "grad_norm": 0.2149327854552427, |
| "learning_rate": 1.0880250223413762e-05, |
| "loss": 0.2925, |
| "step": 2000 |
| }, |
| { |
| "epoch": 2.413751507840772, |
| "grad_norm": 0.2084100213854339, |
| "learning_rate": 1.0857908847184988e-05, |
| "loss": 0.2747, |
| "step": 2001 |
| }, |
| { |
| "epoch": 2.4149577804583835, |
| "grad_norm": 0.23570811547273146, |
| "learning_rate": 1.083556747095621e-05, |
| "loss": 0.2906, |
| "step": 2002 |
| }, |
| { |
| "epoch": 2.4161640530759954, |
| "grad_norm": 0.2257613385126158, |
| "learning_rate": 1.0813226094727436e-05, |
| "loss": 0.3031, |
| "step": 2003 |
| }, |
| { |
| "epoch": 2.4173703256936068, |
| "grad_norm": 0.2365614982781767, |
| "learning_rate": 1.079088471849866e-05, |
| "loss": 0.2948, |
| "step": 2004 |
| }, |
| { |
| "epoch": 2.418576598311218, |
| "grad_norm": 0.22036504428958142, |
| "learning_rate": 1.0768543342269884e-05, |
| "loss": 0.2899, |
| "step": 2005 |
| }, |
| { |
| "epoch": 2.41978287092883, |
| "grad_norm": 0.22931384043977412, |
| "learning_rate": 1.0746201966041108e-05, |
| "loss": 0.2957, |
| "step": 2006 |
| }, |
| { |
| "epoch": 2.4209891435464415, |
| "grad_norm": 0.22648620005109288, |
| "learning_rate": 1.0723860589812333e-05, |
| "loss": 0.2921, |
| "step": 2007 |
| }, |
| { |
| "epoch": 2.422195416164053, |
| "grad_norm": 0.22020345396728686, |
| "learning_rate": 1.0701519213583558e-05, |
| "loss": 0.2889, |
| "step": 2008 |
| }, |
| { |
| "epoch": 2.4234016887816647, |
| "grad_norm": 0.23024407336074107, |
| "learning_rate": 1.067917783735478e-05, |
| "loss": 0.292, |
| "step": 2009 |
| }, |
| { |
| "epoch": 2.424607961399276, |
| "grad_norm": 0.21920021299966, |
| "learning_rate": 1.0656836461126007e-05, |
| "loss": 0.2792, |
| "step": 2010 |
| }, |
| { |
| "epoch": 2.425814234016888, |
| "grad_norm": 0.22348139341258205, |
| "learning_rate": 1.063449508489723e-05, |
| "loss": 0.2981, |
| "step": 2011 |
| }, |
| { |
| "epoch": 2.4270205066344994, |
| "grad_norm": 0.21145115022972605, |
| "learning_rate": 1.0612153708668455e-05, |
| "loss": 0.2807, |
| "step": 2012 |
| }, |
| { |
| "epoch": 2.428226779252111, |
| "grad_norm": 0.22074860399576737, |
| "learning_rate": 1.0589812332439679e-05, |
| "loss": 0.3048, |
| "step": 2013 |
| }, |
| { |
| "epoch": 2.4294330518697227, |
| "grad_norm": 0.2529900639870893, |
| "learning_rate": 1.0567470956210903e-05, |
| "loss": 0.2811, |
| "step": 2014 |
| }, |
| { |
| "epoch": 2.430639324487334, |
| "grad_norm": 0.2166067972140964, |
| "learning_rate": 1.0545129579982127e-05, |
| "loss": 0.2796, |
| "step": 2015 |
| }, |
| { |
| "epoch": 2.431845597104946, |
| "grad_norm": 0.21597450621839834, |
| "learning_rate": 1.0522788203753351e-05, |
| "loss": 0.2978, |
| "step": 2016 |
| }, |
| { |
| "epoch": 2.4330518697225574, |
| "grad_norm": 0.24619094911709458, |
| "learning_rate": 1.0500446827524577e-05, |
| "loss": 0.2933, |
| "step": 2017 |
| }, |
| { |
| "epoch": 2.434258142340169, |
| "grad_norm": 0.22294280116564494, |
| "learning_rate": 1.04781054512958e-05, |
| "loss": 0.2899, |
| "step": 2018 |
| }, |
| { |
| "epoch": 2.4354644149577807, |
| "grad_norm": 0.2218284276829594, |
| "learning_rate": 1.0455764075067025e-05, |
| "loss": 0.2946, |
| "step": 2019 |
| }, |
| { |
| "epoch": 2.436670687575392, |
| "grad_norm": 0.25621556450007477, |
| "learning_rate": 1.0433422698838249e-05, |
| "loss": 0.2868, |
| "step": 2020 |
| }, |
| { |
| "epoch": 2.4378769601930035, |
| "grad_norm": 0.22991632882380855, |
| "learning_rate": 1.0411081322609473e-05, |
| "loss": 0.2895, |
| "step": 2021 |
| }, |
| { |
| "epoch": 2.4390832328106153, |
| "grad_norm": 0.2260095543571356, |
| "learning_rate": 1.0388739946380697e-05, |
| "loss": 0.3088, |
| "step": 2022 |
| }, |
| { |
| "epoch": 2.4402895054282268, |
| "grad_norm": 0.22973240522262808, |
| "learning_rate": 1.0366398570151923e-05, |
| "loss": 0.2935, |
| "step": 2023 |
| }, |
| { |
| "epoch": 2.441495778045838, |
| "grad_norm": 0.23705590633564524, |
| "learning_rate": 1.0344057193923145e-05, |
| "loss": 0.2916, |
| "step": 2024 |
| }, |
| { |
| "epoch": 2.44270205066345, |
| "grad_norm": 0.2514158757053114, |
| "learning_rate": 1.0321715817694371e-05, |
| "loss": 0.2817, |
| "step": 2025 |
| }, |
| { |
| "epoch": 2.4439083232810614, |
| "grad_norm": 0.2198108837195466, |
| "learning_rate": 1.0299374441465595e-05, |
| "loss": 0.2884, |
| "step": 2026 |
| }, |
| { |
| "epoch": 2.445114595898673, |
| "grad_norm": 0.2504281500366312, |
| "learning_rate": 1.0277033065236818e-05, |
| "loss": 0.2989, |
| "step": 2027 |
| }, |
| { |
| "epoch": 2.4463208685162847, |
| "grad_norm": 0.2504968434548994, |
| "learning_rate": 1.0254691689008044e-05, |
| "loss": 0.2914, |
| "step": 2028 |
| }, |
| { |
| "epoch": 2.447527141133896, |
| "grad_norm": 0.21234515057169162, |
| "learning_rate": 1.0232350312779268e-05, |
| "loss": 0.2799, |
| "step": 2029 |
| }, |
| { |
| "epoch": 2.448733413751508, |
| "grad_norm": 0.21297856434093013, |
| "learning_rate": 1.0210008936550492e-05, |
| "loss": 0.2785, |
| "step": 2030 |
| }, |
| { |
| "epoch": 2.4499396863691194, |
| "grad_norm": 0.21905395061242522, |
| "learning_rate": 1.0187667560321716e-05, |
| "loss": 0.2738, |
| "step": 2031 |
| }, |
| { |
| "epoch": 2.451145958986731, |
| "grad_norm": 0.23870861408031346, |
| "learning_rate": 1.0165326184092942e-05, |
| "loss": 0.2829, |
| "step": 2032 |
| }, |
| { |
| "epoch": 2.4523522316043427, |
| "grad_norm": 0.2110603313911652, |
| "learning_rate": 1.0142984807864164e-05, |
| "loss": 0.2907, |
| "step": 2033 |
| }, |
| { |
| "epoch": 2.453558504221954, |
| "grad_norm": 0.2079993100221868, |
| "learning_rate": 1.012064343163539e-05, |
| "loss": 0.2797, |
| "step": 2034 |
| }, |
| { |
| "epoch": 2.454764776839566, |
| "grad_norm": 0.19952965390672966, |
| "learning_rate": 1.0098302055406614e-05, |
| "loss": 0.2821, |
| "step": 2035 |
| }, |
| { |
| "epoch": 2.4559710494571774, |
| "grad_norm": 0.3063957219989011, |
| "learning_rate": 1.0075960679177838e-05, |
| "loss": 0.2916, |
| "step": 2036 |
| }, |
| { |
| "epoch": 2.457177322074789, |
| "grad_norm": 0.2573913236576673, |
| "learning_rate": 1.0053619302949062e-05, |
| "loss": 0.2936, |
| "step": 2037 |
| }, |
| { |
| "epoch": 2.4583835946924006, |
| "grad_norm": 0.23628800661282728, |
| "learning_rate": 1.0031277926720286e-05, |
| "loss": 0.2997, |
| "step": 2038 |
| }, |
| { |
| "epoch": 2.459589867310012, |
| "grad_norm": 0.3707608999179138, |
| "learning_rate": 1.0008936550491512e-05, |
| "loss": 0.2921, |
| "step": 2039 |
| }, |
| { |
| "epoch": 2.4607961399276235, |
| "grad_norm": 0.2355724932937776, |
| "learning_rate": 9.986595174262734e-06, |
| "loss": 0.2842, |
| "step": 2040 |
| }, |
| { |
| "epoch": 2.4620024125452353, |
| "grad_norm": 0.20979675529449615, |
| "learning_rate": 9.96425379803396e-06, |
| "loss": 0.2745, |
| "step": 2041 |
| }, |
| { |
| "epoch": 2.4632086851628467, |
| "grad_norm": 0.23092304983801692, |
| "learning_rate": 9.941912421805184e-06, |
| "loss": 0.2991, |
| "step": 2042 |
| }, |
| { |
| "epoch": 2.464414957780458, |
| "grad_norm": 0.23268676022732815, |
| "learning_rate": 9.919571045576408e-06, |
| "loss": 0.2839, |
| "step": 2043 |
| }, |
| { |
| "epoch": 2.46562123039807, |
| "grad_norm": 0.22396777883699703, |
| "learning_rate": 9.897229669347632e-06, |
| "loss": 0.2968, |
| "step": 2044 |
| }, |
| { |
| "epoch": 2.4668275030156814, |
| "grad_norm": 0.26078276763958036, |
| "learning_rate": 9.874888293118856e-06, |
| "loss": 0.2971, |
| "step": 2045 |
| }, |
| { |
| "epoch": 2.4680337756332933, |
| "grad_norm": 0.24364965152903365, |
| "learning_rate": 9.85254691689008e-06, |
| "loss": 0.2895, |
| "step": 2046 |
| }, |
| { |
| "epoch": 2.4692400482509047, |
| "grad_norm": 0.24028537875953107, |
| "learning_rate": 9.830205540661305e-06, |
| "loss": 0.2878, |
| "step": 2047 |
| }, |
| { |
| "epoch": 2.470446320868516, |
| "grad_norm": 0.22503830564530097, |
| "learning_rate": 9.80786416443253e-06, |
| "loss": 0.2823, |
| "step": 2048 |
| }, |
| { |
| "epoch": 2.471652593486128, |
| "grad_norm": 0.23918310040715698, |
| "learning_rate": 9.785522788203753e-06, |
| "loss": 0.2976, |
| "step": 2049 |
| }, |
| { |
| "epoch": 2.4728588661037394, |
| "grad_norm": 0.23847496984436617, |
| "learning_rate": 9.763181411974979e-06, |
| "loss": 0.2841, |
| "step": 2050 |
| }, |
| { |
| "epoch": 2.4740651387213513, |
| "grad_norm": 0.21340888253047746, |
| "learning_rate": 9.740840035746203e-06, |
| "loss": 0.2751, |
| "step": 2051 |
| }, |
| { |
| "epoch": 2.4752714113389627, |
| "grad_norm": 0.27284485919627044, |
| "learning_rate": 9.718498659517427e-06, |
| "loss": 0.2983, |
| "step": 2052 |
| }, |
| { |
| "epoch": 2.476477683956574, |
| "grad_norm": 0.23262515940334016, |
| "learning_rate": 9.69615728328865e-06, |
| "loss": 0.2893, |
| "step": 2053 |
| }, |
| { |
| "epoch": 2.477683956574186, |
| "grad_norm": 0.21320522352667567, |
| "learning_rate": 9.673815907059877e-06, |
| "loss": 0.2904, |
| "step": 2054 |
| }, |
| { |
| "epoch": 2.4788902291917974, |
| "grad_norm": 0.21281048078611334, |
| "learning_rate": 9.651474530831099e-06, |
| "loss": 0.2783, |
| "step": 2055 |
| }, |
| { |
| "epoch": 2.4800965018094088, |
| "grad_norm": 0.24467691806482708, |
| "learning_rate": 9.629133154602323e-06, |
| "loss": 0.2854, |
| "step": 2056 |
| }, |
| { |
| "epoch": 2.4813027744270206, |
| "grad_norm": 0.253676597589621, |
| "learning_rate": 9.606791778373549e-06, |
| "loss": 0.3071, |
| "step": 2057 |
| }, |
| { |
| "epoch": 2.482509047044632, |
| "grad_norm": 0.23522323243807983, |
| "learning_rate": 9.584450402144771e-06, |
| "loss": 0.2883, |
| "step": 2058 |
| }, |
| { |
| "epoch": 2.4837153196622435, |
| "grad_norm": 0.21968405569335833, |
| "learning_rate": 9.562109025915997e-06, |
| "loss": 0.293, |
| "step": 2059 |
| }, |
| { |
| "epoch": 2.4849215922798553, |
| "grad_norm": 0.23336070313958515, |
| "learning_rate": 9.539767649687221e-06, |
| "loss": 0.2996, |
| "step": 2060 |
| }, |
| { |
| "epoch": 2.4861278648974667, |
| "grad_norm": 0.23574400321619537, |
| "learning_rate": 9.517426273458445e-06, |
| "loss": 0.319, |
| "step": 2061 |
| }, |
| { |
| "epoch": 2.4873341375150786, |
| "grad_norm": 0.25284110850110175, |
| "learning_rate": 9.49508489722967e-06, |
| "loss": 0.3039, |
| "step": 2062 |
| }, |
| { |
| "epoch": 2.48854041013269, |
| "grad_norm": 0.22609295713824532, |
| "learning_rate": 9.472743521000895e-06, |
| "loss": 0.2889, |
| "step": 2063 |
| }, |
| { |
| "epoch": 2.4897466827503014, |
| "grad_norm": 0.21782544942151286, |
| "learning_rate": 9.450402144772117e-06, |
| "loss": 0.2773, |
| "step": 2064 |
| }, |
| { |
| "epoch": 2.4909529553679133, |
| "grad_norm": 0.24395759916835988, |
| "learning_rate": 9.428060768543343e-06, |
| "loss": 0.2872, |
| "step": 2065 |
| }, |
| { |
| "epoch": 2.4921592279855247, |
| "grad_norm": 0.2100854112198129, |
| "learning_rate": 9.405719392314567e-06, |
| "loss": 0.2683, |
| "step": 2066 |
| }, |
| { |
| "epoch": 2.4933655006031366, |
| "grad_norm": 0.22018277045218249, |
| "learning_rate": 9.383378016085791e-06, |
| "loss": 0.2989, |
| "step": 2067 |
| }, |
| { |
| "epoch": 2.494571773220748, |
| "grad_norm": 0.22632341437009562, |
| "learning_rate": 9.361036639857016e-06, |
| "loss": 0.2764, |
| "step": 2068 |
| }, |
| { |
| "epoch": 2.4957780458383594, |
| "grad_norm": 0.23134887518421296, |
| "learning_rate": 9.33869526362824e-06, |
| "loss": 0.2621, |
| "step": 2069 |
| }, |
| { |
| "epoch": 2.4969843184559712, |
| "grad_norm": 0.2259347845947623, |
| "learning_rate": 9.316353887399465e-06, |
| "loss": 0.3099, |
| "step": 2070 |
| }, |
| { |
| "epoch": 2.4981905910735827, |
| "grad_norm": 0.21947311486952517, |
| "learning_rate": 9.294012511170688e-06, |
| "loss": 0.2836, |
| "step": 2071 |
| }, |
| { |
| "epoch": 2.499396863691194, |
| "grad_norm": 0.21153193775768228, |
| "learning_rate": 9.271671134941914e-06, |
| "loss": 0.2656, |
| "step": 2072 |
| }, |
| { |
| "epoch": 2.500603136308806, |
| "grad_norm": 0.22972463052223152, |
| "learning_rate": 9.249329758713138e-06, |
| "loss": 0.2958, |
| "step": 2073 |
| }, |
| { |
| "epoch": 2.5018094089264173, |
| "grad_norm": 0.22427297366530236, |
| "learning_rate": 9.226988382484362e-06, |
| "loss": 0.2846, |
| "step": 2074 |
| }, |
| { |
| "epoch": 2.5030156815440288, |
| "grad_norm": 0.2188475129476444, |
| "learning_rate": 9.204647006255586e-06, |
| "loss": 0.2822, |
| "step": 2075 |
| }, |
| { |
| "epoch": 2.5042219541616406, |
| "grad_norm": 0.22157141957950305, |
| "learning_rate": 9.18230563002681e-06, |
| "loss": 0.2859, |
| "step": 2076 |
| }, |
| { |
| "epoch": 2.505428226779252, |
| "grad_norm": 0.20369669117130704, |
| "learning_rate": 9.159964253798034e-06, |
| "loss": 0.2623, |
| "step": 2077 |
| }, |
| { |
| "epoch": 2.5066344993968634, |
| "grad_norm": 0.26069163206394774, |
| "learning_rate": 9.137622877569258e-06, |
| "loss": 0.3071, |
| "step": 2078 |
| }, |
| { |
| "epoch": 2.5078407720144753, |
| "grad_norm": 0.21427683157651178, |
| "learning_rate": 9.115281501340484e-06, |
| "loss": 0.2855, |
| "step": 2079 |
| }, |
| { |
| "epoch": 2.5090470446320867, |
| "grad_norm": 0.20008581879419765, |
| "learning_rate": 9.092940125111706e-06, |
| "loss": 0.2815, |
| "step": 2080 |
| }, |
| { |
| "epoch": 2.5102533172496986, |
| "grad_norm": 0.24743260230654526, |
| "learning_rate": 9.070598748882932e-06, |
| "loss": 0.2882, |
| "step": 2081 |
| }, |
| { |
| "epoch": 2.51145958986731, |
| "grad_norm": 0.234746962743461, |
| "learning_rate": 9.048257372654156e-06, |
| "loss": 0.2867, |
| "step": 2082 |
| }, |
| { |
| "epoch": 2.512665862484922, |
| "grad_norm": 0.23547248016350894, |
| "learning_rate": 9.02591599642538e-06, |
| "loss": 0.2846, |
| "step": 2083 |
| }, |
| { |
| "epoch": 2.5138721351025333, |
| "grad_norm": 0.22598658102916713, |
| "learning_rate": 9.003574620196604e-06, |
| "loss": 0.295, |
| "step": 2084 |
| }, |
| { |
| "epoch": 2.5150784077201447, |
| "grad_norm": 0.2464188752316304, |
| "learning_rate": 8.98123324396783e-06, |
| "loss": 0.2962, |
| "step": 2085 |
| }, |
| { |
| "epoch": 2.5162846803377565, |
| "grad_norm": 0.20823067381368154, |
| "learning_rate": 8.958891867739052e-06, |
| "loss": 0.2761, |
| "step": 2086 |
| }, |
| { |
| "epoch": 2.517490952955368, |
| "grad_norm": 0.2450510002186486, |
| "learning_rate": 8.936550491510277e-06, |
| "loss": 0.2865, |
| "step": 2087 |
| }, |
| { |
| "epoch": 2.5186972255729794, |
| "grad_norm": 0.24205249299984813, |
| "learning_rate": 8.914209115281502e-06, |
| "loss": 0.3093, |
| "step": 2088 |
| }, |
| { |
| "epoch": 2.5199034981905912, |
| "grad_norm": 0.24457280923901634, |
| "learning_rate": 8.891867739052725e-06, |
| "loss": 0.2896, |
| "step": 2089 |
| }, |
| { |
| "epoch": 2.5211097708082026, |
| "grad_norm": 0.21212091302235664, |
| "learning_rate": 8.86952636282395e-06, |
| "loss": 0.2833, |
| "step": 2090 |
| }, |
| { |
| "epoch": 2.522316043425814, |
| "grad_norm": 0.23580463237099886, |
| "learning_rate": 8.847184986595175e-06, |
| "loss": 0.2784, |
| "step": 2091 |
| }, |
| { |
| "epoch": 2.523522316043426, |
| "grad_norm": 0.2388276288521073, |
| "learning_rate": 8.824843610366399e-06, |
| "loss": 0.2906, |
| "step": 2092 |
| }, |
| { |
| "epoch": 2.5247285886610373, |
| "grad_norm": 0.22087481614885304, |
| "learning_rate": 8.802502234137623e-06, |
| "loss": 0.2763, |
| "step": 2093 |
| }, |
| { |
| "epoch": 2.5259348612786487, |
| "grad_norm": 0.2246523474210579, |
| "learning_rate": 8.780160857908849e-06, |
| "loss": 0.2842, |
| "step": 2094 |
| }, |
| { |
| "epoch": 2.5271411338962606, |
| "grad_norm": 0.23119978686478482, |
| "learning_rate": 8.757819481680071e-06, |
| "loss": 0.2892, |
| "step": 2095 |
| }, |
| { |
| "epoch": 2.528347406513872, |
| "grad_norm": 0.23233171332683422, |
| "learning_rate": 8.735478105451297e-06, |
| "loss": 0.2907, |
| "step": 2096 |
| }, |
| { |
| "epoch": 2.529553679131484, |
| "grad_norm": 0.21942382453851111, |
| "learning_rate": 8.71313672922252e-06, |
| "loss": 0.2872, |
| "step": 2097 |
| }, |
| { |
| "epoch": 2.5307599517490953, |
| "grad_norm": 0.2413498290860737, |
| "learning_rate": 8.690795352993745e-06, |
| "loss": 0.2875, |
| "step": 2098 |
| }, |
| { |
| "epoch": 2.531966224366707, |
| "grad_norm": 0.23083935116777204, |
| "learning_rate": 8.668453976764969e-06, |
| "loss": 0.2762, |
| "step": 2099 |
| }, |
| { |
| "epoch": 2.5331724969843186, |
| "grad_norm": 0.21390651009006564, |
| "learning_rate": 8.646112600536193e-06, |
| "loss": 0.2912, |
| "step": 2100 |
| }, |
| { |
| "epoch": 2.53437876960193, |
| "grad_norm": 0.2365999128822495, |
| "learning_rate": 8.623771224307419e-06, |
| "loss": 0.2918, |
| "step": 2101 |
| }, |
| { |
| "epoch": 2.535585042219542, |
| "grad_norm": 0.2322084809252393, |
| "learning_rate": 8.601429848078641e-06, |
| "loss": 0.2866, |
| "step": 2102 |
| }, |
| { |
| "epoch": 2.5367913148371533, |
| "grad_norm": 0.21403628042564693, |
| "learning_rate": 8.579088471849867e-06, |
| "loss": 0.2715, |
| "step": 2103 |
| }, |
| { |
| "epoch": 2.5379975874547647, |
| "grad_norm": 0.23633271492330205, |
| "learning_rate": 8.556747095621091e-06, |
| "loss": 0.3122, |
| "step": 2104 |
| }, |
| { |
| "epoch": 2.5392038600723765, |
| "grad_norm": 0.2171261190451419, |
| "learning_rate": 8.534405719392315e-06, |
| "loss": 0.2669, |
| "step": 2105 |
| }, |
| { |
| "epoch": 2.540410132689988, |
| "grad_norm": 0.20562237412666165, |
| "learning_rate": 8.51206434316354e-06, |
| "loss": 0.2838, |
| "step": 2106 |
| }, |
| { |
| "epoch": 2.5416164053075994, |
| "grad_norm": 0.22643156797827918, |
| "learning_rate": 8.489722966934763e-06, |
| "loss": 0.2767, |
| "step": 2107 |
| }, |
| { |
| "epoch": 2.542822677925211, |
| "grad_norm": 0.2482978387199731, |
| "learning_rate": 8.467381590705988e-06, |
| "loss": 0.2795, |
| "step": 2108 |
| }, |
| { |
| "epoch": 2.5440289505428226, |
| "grad_norm": 0.2221033509722392, |
| "learning_rate": 8.445040214477212e-06, |
| "loss": 0.2896, |
| "step": 2109 |
| }, |
| { |
| "epoch": 2.545235223160434, |
| "grad_norm": 0.21304030830619042, |
| "learning_rate": 8.422698838248437e-06, |
| "loss": 0.2899, |
| "step": 2110 |
| }, |
| { |
| "epoch": 2.546441495778046, |
| "grad_norm": 0.21298340995217513, |
| "learning_rate": 8.40035746201966e-06, |
| "loss": 0.2985, |
| "step": 2111 |
| }, |
| { |
| "epoch": 2.5476477683956573, |
| "grad_norm": 0.2268107049383085, |
| "learning_rate": 8.378016085790886e-06, |
| "loss": 0.2876, |
| "step": 2112 |
| }, |
| { |
| "epoch": 2.5488540410132687, |
| "grad_norm": 0.2283769777933946, |
| "learning_rate": 8.35567470956211e-06, |
| "loss": 0.2937, |
| "step": 2113 |
| }, |
| { |
| "epoch": 2.5500603136308806, |
| "grad_norm": 0.20201400435301958, |
| "learning_rate": 8.333333333333334e-06, |
| "loss": 0.2736, |
| "step": 2114 |
| }, |
| { |
| "epoch": 2.551266586248492, |
| "grad_norm": 0.21118648069579432, |
| "learning_rate": 8.310991957104558e-06, |
| "loss": 0.2801, |
| "step": 2115 |
| }, |
| { |
| "epoch": 2.552472858866104, |
| "grad_norm": 0.21380147734469018, |
| "learning_rate": 8.288650580875784e-06, |
| "loss": 0.2879, |
| "step": 2116 |
| }, |
| { |
| "epoch": 2.5536791314837153, |
| "grad_norm": 0.24427500368171587, |
| "learning_rate": 8.266309204647006e-06, |
| "loss": 0.2828, |
| "step": 2117 |
| }, |
| { |
| "epoch": 2.554885404101327, |
| "grad_norm": 0.2104444247720201, |
| "learning_rate": 8.24396782841823e-06, |
| "loss": 0.284, |
| "step": 2118 |
| }, |
| { |
| "epoch": 2.5560916767189386, |
| "grad_norm": 0.21710117554724775, |
| "learning_rate": 8.221626452189456e-06, |
| "loss": 0.2864, |
| "step": 2119 |
| }, |
| { |
| "epoch": 2.55729794933655, |
| "grad_norm": 0.22717244072496412, |
| "learning_rate": 8.199285075960678e-06, |
| "loss": 0.2781, |
| "step": 2120 |
| }, |
| { |
| "epoch": 2.558504221954162, |
| "grad_norm": 0.2254436961531605, |
| "learning_rate": 8.176943699731904e-06, |
| "loss": 0.2782, |
| "step": 2121 |
| }, |
| { |
| "epoch": 2.5597104945717732, |
| "grad_norm": 0.21519589525491942, |
| "learning_rate": 8.154602323503128e-06, |
| "loss": 0.297, |
| "step": 2122 |
| }, |
| { |
| "epoch": 2.5609167671893847, |
| "grad_norm": 0.2320770217495183, |
| "learning_rate": 8.132260947274352e-06, |
| "loss": 0.3072, |
| "step": 2123 |
| }, |
| { |
| "epoch": 2.5621230398069965, |
| "grad_norm": 0.213745103139696, |
| "learning_rate": 8.109919571045576e-06, |
| "loss": 0.2864, |
| "step": 2124 |
| }, |
| { |
| "epoch": 2.563329312424608, |
| "grad_norm": 0.21520826517792885, |
| "learning_rate": 8.087578194816802e-06, |
| "loss": 0.2772, |
| "step": 2125 |
| }, |
| { |
| "epoch": 2.5645355850422193, |
| "grad_norm": 0.23267029432546807, |
| "learning_rate": 8.065236818588024e-06, |
| "loss": 0.2934, |
| "step": 2126 |
| }, |
| { |
| "epoch": 2.565741857659831, |
| "grad_norm": 0.2346619111675145, |
| "learning_rate": 8.04289544235925e-06, |
| "loss": 0.2997, |
| "step": 2127 |
| }, |
| { |
| "epoch": 2.5669481302774426, |
| "grad_norm": 0.21089577135395482, |
| "learning_rate": 8.020554066130474e-06, |
| "loss": 0.291, |
| "step": 2128 |
| }, |
| { |
| "epoch": 2.568154402895054, |
| "grad_norm": 0.23237289510955644, |
| "learning_rate": 7.998212689901698e-06, |
| "loss": 0.2821, |
| "step": 2129 |
| }, |
| { |
| "epoch": 2.569360675512666, |
| "grad_norm": 0.22800080953514776, |
| "learning_rate": 7.975871313672923e-06, |
| "loss": 0.2965, |
| "step": 2130 |
| }, |
| { |
| "epoch": 2.5705669481302773, |
| "grad_norm": 0.20687732257395336, |
| "learning_rate": 7.953529937444147e-06, |
| "loss": 0.2843, |
| "step": 2131 |
| }, |
| { |
| "epoch": 2.571773220747889, |
| "grad_norm": 0.2252633109054214, |
| "learning_rate": 7.931188561215372e-06, |
| "loss": 0.2774, |
| "step": 2132 |
| }, |
| { |
| "epoch": 2.5729794933655006, |
| "grad_norm": 0.21503546781105598, |
| "learning_rate": 7.908847184986595e-06, |
| "loss": 0.2943, |
| "step": 2133 |
| }, |
| { |
| "epoch": 2.5741857659831124, |
| "grad_norm": 0.2103849220087177, |
| "learning_rate": 7.88650580875782e-06, |
| "loss": 0.2823, |
| "step": 2134 |
| }, |
| { |
| "epoch": 2.575392038600724, |
| "grad_norm": 0.2356664422191725, |
| "learning_rate": 7.864164432529045e-06, |
| "loss": 0.2865, |
| "step": 2135 |
| }, |
| { |
| "epoch": 2.5765983112183353, |
| "grad_norm": 0.21854984713818312, |
| "learning_rate": 7.841823056300269e-06, |
| "loss": 0.2851, |
| "step": 2136 |
| }, |
| { |
| "epoch": 2.577804583835947, |
| "grad_norm": 0.2081407007815824, |
| "learning_rate": 7.819481680071493e-06, |
| "loss": 0.2814, |
| "step": 2137 |
| }, |
| { |
| "epoch": 2.5790108564535585, |
| "grad_norm": 0.22624538817631823, |
| "learning_rate": 7.797140303842717e-06, |
| "loss": 0.2918, |
| "step": 2138 |
| }, |
| { |
| "epoch": 2.58021712907117, |
| "grad_norm": 0.20932221958486957, |
| "learning_rate": 7.774798927613941e-06, |
| "loss": 0.2874, |
| "step": 2139 |
| }, |
| { |
| "epoch": 2.581423401688782, |
| "grad_norm": 0.2162037323396819, |
| "learning_rate": 7.752457551385165e-06, |
| "loss": 0.292, |
| "step": 2140 |
| }, |
| { |
| "epoch": 2.5826296743063932, |
| "grad_norm": 0.2249937624133852, |
| "learning_rate": 7.730116175156391e-06, |
| "loss": 0.292, |
| "step": 2141 |
| }, |
| { |
| "epoch": 2.5838359469240046, |
| "grad_norm": 0.1994108304524604, |
| "learning_rate": 7.707774798927613e-06, |
| "loss": 0.278, |
| "step": 2142 |
| }, |
| { |
| "epoch": 2.5850422195416165, |
| "grad_norm": 0.19608262203469157, |
| "learning_rate": 7.685433422698839e-06, |
| "loss": 0.2789, |
| "step": 2143 |
| }, |
| { |
| "epoch": 2.586248492159228, |
| "grad_norm": 0.2086529068204614, |
| "learning_rate": 7.663092046470063e-06, |
| "loss": 0.2905, |
| "step": 2144 |
| }, |
| { |
| "epoch": 2.5874547647768393, |
| "grad_norm": 0.20938557655487647, |
| "learning_rate": 7.640750670241287e-06, |
| "loss": 0.2802, |
| "step": 2145 |
| }, |
| { |
| "epoch": 2.588661037394451, |
| "grad_norm": 0.20753877295099624, |
| "learning_rate": 7.618409294012511e-06, |
| "loss": 0.2819, |
| "step": 2146 |
| }, |
| { |
| "epoch": 2.5898673100120626, |
| "grad_norm": 0.20991545279572527, |
| "learning_rate": 7.596067917783736e-06, |
| "loss": 0.2841, |
| "step": 2147 |
| }, |
| { |
| "epoch": 2.5910735826296745, |
| "grad_norm": 0.22350098729610313, |
| "learning_rate": 7.5737265415549595e-06, |
| "loss": 0.2981, |
| "step": 2148 |
| }, |
| { |
| "epoch": 2.592279855247286, |
| "grad_norm": 0.2123357064554487, |
| "learning_rate": 7.5513851653261844e-06, |
| "loss": 0.2821, |
| "step": 2149 |
| }, |
| { |
| "epoch": 2.5934861278648977, |
| "grad_norm": 0.1991868250972996, |
| "learning_rate": 7.529043789097409e-06, |
| "loss": 0.2809, |
| "step": 2150 |
| }, |
| { |
| "epoch": 2.594692400482509, |
| "grad_norm": 0.21821718127029568, |
| "learning_rate": 7.506702412868633e-06, |
| "loss": 0.2985, |
| "step": 2151 |
| }, |
| { |
| "epoch": 2.5958986731001206, |
| "grad_norm": 0.21983928145571835, |
| "learning_rate": 7.4843610366398576e-06, |
| "loss": 0.2893, |
| "step": 2152 |
| }, |
| { |
| "epoch": 2.5971049457177324, |
| "grad_norm": 0.20937779766909467, |
| "learning_rate": 7.462019660411082e-06, |
| "loss": 0.2876, |
| "step": 2153 |
| }, |
| { |
| "epoch": 2.598311218335344, |
| "grad_norm": 0.24440569242279062, |
| "learning_rate": 7.439678284182306e-06, |
| "loss": 0.3091, |
| "step": 2154 |
| }, |
| { |
| "epoch": 2.5995174909529553, |
| "grad_norm": 0.2094410586767038, |
| "learning_rate": 7.41733690795353e-06, |
| "loss": 0.2932, |
| "step": 2155 |
| }, |
| { |
| "epoch": 2.600723763570567, |
| "grad_norm": 0.22993205214780774, |
| "learning_rate": 7.394995531724755e-06, |
| "loss": 0.2908, |
| "step": 2156 |
| }, |
| { |
| "epoch": 2.6019300361881785, |
| "grad_norm": 0.232233833116648, |
| "learning_rate": 7.372654155495978e-06, |
| "loss": 0.2849, |
| "step": 2157 |
| }, |
| { |
| "epoch": 2.60313630880579, |
| "grad_norm": 0.20920454232370092, |
| "learning_rate": 7.350312779267203e-06, |
| "loss": 0.2904, |
| "step": 2158 |
| }, |
| { |
| "epoch": 2.604342581423402, |
| "grad_norm": 0.22698018377587673, |
| "learning_rate": 7.327971403038428e-06, |
| "loss": 0.2888, |
| "step": 2159 |
| }, |
| { |
| "epoch": 2.605548854041013, |
| "grad_norm": 0.20161504934121768, |
| "learning_rate": 7.305630026809651e-06, |
| "loss": 0.2715, |
| "step": 2160 |
| }, |
| { |
| "epoch": 2.6067551266586246, |
| "grad_norm": 0.2060420861005284, |
| "learning_rate": 7.283288650580876e-06, |
| "loss": 0.2979, |
| "step": 2161 |
| }, |
| { |
| "epoch": 2.6079613992762365, |
| "grad_norm": 0.22127745692072953, |
| "learning_rate": 7.260947274352101e-06, |
| "loss": 0.2824, |
| "step": 2162 |
| }, |
| { |
| "epoch": 2.609167671893848, |
| "grad_norm": 0.23976066707695107, |
| "learning_rate": 7.238605898123325e-06, |
| "loss": 0.3018, |
| "step": 2163 |
| }, |
| { |
| "epoch": 2.6103739445114593, |
| "grad_norm": 0.21430343796962054, |
| "learning_rate": 7.216264521894549e-06, |
| "loss": 0.29, |
| "step": 2164 |
| }, |
| { |
| "epoch": 2.611580217129071, |
| "grad_norm": 0.22023668390455303, |
| "learning_rate": 7.193923145665773e-06, |
| "loss": 0.2942, |
| "step": 2165 |
| }, |
| { |
| "epoch": 2.6127864897466826, |
| "grad_norm": 0.21547955802919266, |
| "learning_rate": 7.171581769436998e-06, |
| "loss": 0.2808, |
| "step": 2166 |
| }, |
| { |
| "epoch": 2.6139927623642945, |
| "grad_norm": 0.23149386933699645, |
| "learning_rate": 7.1492403932082214e-06, |
| "loss": 0.3099, |
| "step": 2167 |
| }, |
| { |
| "epoch": 2.615199034981906, |
| "grad_norm": 0.23126252331722508, |
| "learning_rate": 7.126899016979446e-06, |
| "loss": 0.2752, |
| "step": 2168 |
| }, |
| { |
| "epoch": 2.6164053075995177, |
| "grad_norm": 0.19958085393309985, |
| "learning_rate": 7.104557640750671e-06, |
| "loss": 0.2746, |
| "step": 2169 |
| }, |
| { |
| "epoch": 2.617611580217129, |
| "grad_norm": 0.2238611225450866, |
| "learning_rate": 7.0822162645218945e-06, |
| "loss": 0.2851, |
| "step": 2170 |
| }, |
| { |
| "epoch": 2.6188178528347406, |
| "grad_norm": 0.2352732384970299, |
| "learning_rate": 7.0598748882931195e-06, |
| "loss": 0.2922, |
| "step": 2171 |
| }, |
| { |
| "epoch": 2.6200241254523524, |
| "grad_norm": 0.22415056452326848, |
| "learning_rate": 7.037533512064344e-06, |
| "loss": 0.2673, |
| "step": 2172 |
| }, |
| { |
| "epoch": 2.621230398069964, |
| "grad_norm": 0.21897180401823096, |
| "learning_rate": 7.015192135835568e-06, |
| "loss": 0.2812, |
| "step": 2173 |
| }, |
| { |
| "epoch": 2.6224366706875752, |
| "grad_norm": 0.23020781529953618, |
| "learning_rate": 6.992850759606792e-06, |
| "loss": 0.2853, |
| "step": 2174 |
| }, |
| { |
| "epoch": 2.623642943305187, |
| "grad_norm": 0.21875426785559132, |
| "learning_rate": 6.970509383378017e-06, |
| "loss": 0.2909, |
| "step": 2175 |
| }, |
| { |
| "epoch": 2.6248492159227985, |
| "grad_norm": 0.2816490674500168, |
| "learning_rate": 6.94816800714924e-06, |
| "loss": 0.2988, |
| "step": 2176 |
| }, |
| { |
| "epoch": 2.62605548854041, |
| "grad_norm": 0.21486929348764425, |
| "learning_rate": 6.925826630920465e-06, |
| "loss": 0.2792, |
| "step": 2177 |
| }, |
| { |
| "epoch": 2.627261761158022, |
| "grad_norm": 0.2301825259699349, |
| "learning_rate": 6.90348525469169e-06, |
| "loss": 0.2792, |
| "step": 2178 |
| }, |
| { |
| "epoch": 2.628468033775633, |
| "grad_norm": 0.2253863912294859, |
| "learning_rate": 6.881143878462913e-06, |
| "loss": 0.2953, |
| "step": 2179 |
| }, |
| { |
| "epoch": 2.6296743063932446, |
| "grad_norm": 0.2169639675897766, |
| "learning_rate": 6.858802502234138e-06, |
| "loss": 0.2891, |
| "step": 2180 |
| }, |
| { |
| "epoch": 2.6308805790108565, |
| "grad_norm": 0.21726317823189514, |
| "learning_rate": 6.836461126005363e-06, |
| "loss": 0.291, |
| "step": 2181 |
| }, |
| { |
| "epoch": 2.632086851628468, |
| "grad_norm": 0.19617546763397464, |
| "learning_rate": 6.814119749776586e-06, |
| "loss": 0.2647, |
| "step": 2182 |
| }, |
| { |
| "epoch": 2.6332931242460798, |
| "grad_norm": 0.21554251282986897, |
| "learning_rate": 6.791778373547811e-06, |
| "loss": 0.2843, |
| "step": 2183 |
| }, |
| { |
| "epoch": 2.634499396863691, |
| "grad_norm": 0.2459135828666879, |
| "learning_rate": 6.769436997319035e-06, |
| "loss": 0.3009, |
| "step": 2184 |
| }, |
| { |
| "epoch": 2.635705669481303, |
| "grad_norm": 0.21621503077120693, |
| "learning_rate": 6.747095621090259e-06, |
| "loss": 0.2952, |
| "step": 2185 |
| }, |
| { |
| "epoch": 2.6369119420989144, |
| "grad_norm": 0.20740297901076743, |
| "learning_rate": 6.724754244861483e-06, |
| "loss": 0.295, |
| "step": 2186 |
| }, |
| { |
| "epoch": 2.638118214716526, |
| "grad_norm": 0.21029148372921153, |
| "learning_rate": 6.702412868632708e-06, |
| "loss": 0.2795, |
| "step": 2187 |
| }, |
| { |
| "epoch": 2.6393244873341377, |
| "grad_norm": 0.22609754669207283, |
| "learning_rate": 6.6800714924039315e-06, |
| "loss": 0.2949, |
| "step": 2188 |
| }, |
| { |
| "epoch": 2.640530759951749, |
| "grad_norm": 0.22113015986707052, |
| "learning_rate": 6.6577301161751565e-06, |
| "loss": 0.283, |
| "step": 2189 |
| }, |
| { |
| "epoch": 2.6417370325693605, |
| "grad_norm": 0.21290296828182953, |
| "learning_rate": 6.635388739946381e-06, |
| "loss": 0.3058, |
| "step": 2190 |
| }, |
| { |
| "epoch": 2.6429433051869724, |
| "grad_norm": 0.20663250547689224, |
| "learning_rate": 6.613047363717605e-06, |
| "loss": 0.2704, |
| "step": 2191 |
| }, |
| { |
| "epoch": 2.644149577804584, |
| "grad_norm": 0.2194261553038497, |
| "learning_rate": 6.5907059874888296e-06, |
| "loss": 0.2822, |
| "step": 2192 |
| }, |
| { |
| "epoch": 2.6453558504221952, |
| "grad_norm": 0.2378506340578728, |
| "learning_rate": 6.5683646112600545e-06, |
| "loss": 0.2898, |
| "step": 2193 |
| }, |
| { |
| "epoch": 2.646562123039807, |
| "grad_norm": 0.2242127329287937, |
| "learning_rate": 6.546023235031279e-06, |
| "loss": 0.2779, |
| "step": 2194 |
| }, |
| { |
| "epoch": 2.6477683956574185, |
| "grad_norm": 0.21847059262012553, |
| "learning_rate": 6.523681858802503e-06, |
| "loss": 0.2939, |
| "step": 2195 |
| }, |
| { |
| "epoch": 2.64897466827503, |
| "grad_norm": 0.231399791978887, |
| "learning_rate": 6.501340482573727e-06, |
| "loss": 0.279, |
| "step": 2196 |
| }, |
| { |
| "epoch": 2.650180940892642, |
| "grad_norm": 0.2116623580528426, |
| "learning_rate": 6.478999106344952e-06, |
| "loss": 0.2859, |
| "step": 2197 |
| }, |
| { |
| "epoch": 2.651387213510253, |
| "grad_norm": 0.1980992165403409, |
| "learning_rate": 6.456657730116175e-06, |
| "loss": 0.2804, |
| "step": 2198 |
| }, |
| { |
| "epoch": 2.652593486127865, |
| "grad_norm": 0.24171516250636746, |
| "learning_rate": 6.4343163538874e-06, |
| "loss": 0.2951, |
| "step": 2199 |
| }, |
| { |
| "epoch": 2.6537997587454765, |
| "grad_norm": 0.21184774792981634, |
| "learning_rate": 6.411974977658625e-06, |
| "loss": 0.2931, |
| "step": 2200 |
| }, |
| { |
| "epoch": 2.6550060313630883, |
| "grad_norm": 0.21738233707648502, |
| "learning_rate": 6.389633601429848e-06, |
| "loss": 0.2785, |
| "step": 2201 |
| }, |
| { |
| "epoch": 2.6562123039806997, |
| "grad_norm": 0.2127604344024817, |
| "learning_rate": 6.367292225201073e-06, |
| "loss": 0.2868, |
| "step": 2202 |
| }, |
| { |
| "epoch": 2.657418576598311, |
| "grad_norm": 0.1931954397670524, |
| "learning_rate": 6.344950848972298e-06, |
| "loss": 0.2665, |
| "step": 2203 |
| }, |
| { |
| "epoch": 2.658624849215923, |
| "grad_norm": 0.20838444267534595, |
| "learning_rate": 6.322609472743521e-06, |
| "loss": 0.2807, |
| "step": 2204 |
| }, |
| { |
| "epoch": 2.6598311218335344, |
| "grad_norm": 0.2211018154320071, |
| "learning_rate": 6.300268096514745e-06, |
| "loss": 0.2813, |
| "step": 2205 |
| }, |
| { |
| "epoch": 2.661037394451146, |
| "grad_norm": 0.2168479274201597, |
| "learning_rate": 6.27792672028597e-06, |
| "loss": 0.2765, |
| "step": 2206 |
| }, |
| { |
| "epoch": 2.6622436670687577, |
| "grad_norm": 0.2213334733094897, |
| "learning_rate": 6.2555853440571934e-06, |
| "loss": 0.2798, |
| "step": 2207 |
| }, |
| { |
| "epoch": 2.663449939686369, |
| "grad_norm": 0.19954163195183472, |
| "learning_rate": 6.233243967828418e-06, |
| "loss": 0.2656, |
| "step": 2208 |
| }, |
| { |
| "epoch": 2.6646562123039805, |
| "grad_norm": 0.22078742024428197, |
| "learning_rate": 6.2109025915996425e-06, |
| "loss": 0.286, |
| "step": 2209 |
| }, |
| { |
| "epoch": 2.6658624849215924, |
| "grad_norm": 0.22161496613081794, |
| "learning_rate": 6.188561215370867e-06, |
| "loss": 0.2857, |
| "step": 2210 |
| }, |
| { |
| "epoch": 2.667068757539204, |
| "grad_norm": 0.21212036342000515, |
| "learning_rate": 6.1662198391420915e-06, |
| "loss": 0.286, |
| "step": 2211 |
| }, |
| { |
| "epoch": 2.668275030156815, |
| "grad_norm": 0.20187880736220248, |
| "learning_rate": 6.1438784629133156e-06, |
| "loss": 0.2815, |
| "step": 2212 |
| }, |
| { |
| "epoch": 2.669481302774427, |
| "grad_norm": 0.22474273562685815, |
| "learning_rate": 6.1215370866845405e-06, |
| "loss": 0.2982, |
| "step": 2213 |
| }, |
| { |
| "epoch": 2.6706875753920385, |
| "grad_norm": 0.20611518128991452, |
| "learning_rate": 6.099195710455765e-06, |
| "loss": 0.2976, |
| "step": 2214 |
| }, |
| { |
| "epoch": 2.67189384800965, |
| "grad_norm": 0.20695572664023934, |
| "learning_rate": 6.076854334226989e-06, |
| "loss": 0.2896, |
| "step": 2215 |
| }, |
| { |
| "epoch": 2.6731001206272618, |
| "grad_norm": 0.21701647489801484, |
| "learning_rate": 6.054512957998213e-06, |
| "loss": 0.306, |
| "step": 2216 |
| }, |
| { |
| "epoch": 2.674306393244873, |
| "grad_norm": 0.2390978357229728, |
| "learning_rate": 6.032171581769437e-06, |
| "loss": 0.2889, |
| "step": 2217 |
| }, |
| { |
| "epoch": 2.675512665862485, |
| "grad_norm": 0.2248591636170576, |
| "learning_rate": 6.009830205540662e-06, |
| "loss": 0.3062, |
| "step": 2218 |
| }, |
| { |
| "epoch": 2.6767189384800965, |
| "grad_norm": 0.20736477751728932, |
| "learning_rate": 5.987488829311886e-06, |
| "loss": 0.2959, |
| "step": 2219 |
| }, |
| { |
| "epoch": 2.6779252110977083, |
| "grad_norm": 0.20115538299047425, |
| "learning_rate": 5.96514745308311e-06, |
| "loss": 0.2721, |
| "step": 2220 |
| }, |
| { |
| "epoch": 2.6791314837153197, |
| "grad_norm": 0.20571833148964577, |
| "learning_rate": 5.942806076854335e-06, |
| "loss": 0.2871, |
| "step": 2221 |
| }, |
| { |
| "epoch": 2.680337756332931, |
| "grad_norm": 0.22354102926280847, |
| "learning_rate": 5.920464700625559e-06, |
| "loss": 0.2896, |
| "step": 2222 |
| }, |
| { |
| "epoch": 2.681544028950543, |
| "grad_norm": 0.20983791062606558, |
| "learning_rate": 5.898123324396783e-06, |
| "loss": 0.2887, |
| "step": 2223 |
| }, |
| { |
| "epoch": 2.6827503015681544, |
| "grad_norm": 0.2021459077857852, |
| "learning_rate": 5.875781948168008e-06, |
| "loss": 0.2832, |
| "step": 2224 |
| }, |
| { |
| "epoch": 2.683956574185766, |
| "grad_norm": 0.21740950707741213, |
| "learning_rate": 5.853440571939232e-06, |
| "loss": 0.2653, |
| "step": 2225 |
| }, |
| { |
| "epoch": 2.6851628468033777, |
| "grad_norm": 0.22039114740127552, |
| "learning_rate": 5.831099195710455e-06, |
| "loss": 0.2971, |
| "step": 2226 |
| }, |
| { |
| "epoch": 2.686369119420989, |
| "grad_norm": 0.22684638806753404, |
| "learning_rate": 5.80875781948168e-06, |
| "loss": 0.2937, |
| "step": 2227 |
| }, |
| { |
| "epoch": 2.6875753920386005, |
| "grad_norm": 0.19918080045760725, |
| "learning_rate": 5.786416443252904e-06, |
| "loss": 0.2867, |
| "step": 2228 |
| }, |
| { |
| "epoch": 2.6887816646562124, |
| "grad_norm": 0.227332586710256, |
| "learning_rate": 5.7640750670241285e-06, |
| "loss": 0.2726, |
| "step": 2229 |
| }, |
| { |
| "epoch": 2.689987937273824, |
| "grad_norm": 0.2210279769730908, |
| "learning_rate": 5.741733690795353e-06, |
| "loss": 0.2923, |
| "step": 2230 |
| }, |
| { |
| "epoch": 2.691194209891435, |
| "grad_norm": 0.21827221208506087, |
| "learning_rate": 5.7193923145665775e-06, |
| "loss": 0.2965, |
| "step": 2231 |
| }, |
| { |
| "epoch": 2.692400482509047, |
| "grad_norm": 0.2045014578759283, |
| "learning_rate": 5.697050938337802e-06, |
| "loss": 0.2914, |
| "step": 2232 |
| }, |
| { |
| "epoch": 2.6936067551266585, |
| "grad_norm": 0.6946714666447911, |
| "learning_rate": 5.6747095621090265e-06, |
| "loss": 0.3133, |
| "step": 2233 |
| }, |
| { |
| "epoch": 2.6948130277442703, |
| "grad_norm": 0.21370291737243427, |
| "learning_rate": 5.652368185880251e-06, |
| "loss": 0.2715, |
| "step": 2234 |
| }, |
| { |
| "epoch": 2.6960193003618818, |
| "grad_norm": 0.2044609100427451, |
| "learning_rate": 5.630026809651475e-06, |
| "loss": 0.2926, |
| "step": 2235 |
| }, |
| { |
| "epoch": 2.6972255729794936, |
| "grad_norm": 0.22829735674613444, |
| "learning_rate": 5.607685433422699e-06, |
| "loss": 0.2833, |
| "step": 2236 |
| }, |
| { |
| "epoch": 2.698431845597105, |
| "grad_norm": 0.26018311521323667, |
| "learning_rate": 5.585344057193923e-06, |
| "loss": 0.2839, |
| "step": 2237 |
| }, |
| { |
| "epoch": 2.6996381182147164, |
| "grad_norm": 0.2086058178570994, |
| "learning_rate": 5.563002680965148e-06, |
| "loss": 0.2895, |
| "step": 2238 |
| }, |
| { |
| "epoch": 2.7008443908323283, |
| "grad_norm": 0.2050257982382011, |
| "learning_rate": 5.540661304736372e-06, |
| "loss": 0.2828, |
| "step": 2239 |
| }, |
| { |
| "epoch": 2.7020506634499397, |
| "grad_norm": 0.2139514048129372, |
| "learning_rate": 5.518319928507596e-06, |
| "loss": 0.2867, |
| "step": 2240 |
| }, |
| { |
| "epoch": 2.703256936067551, |
| "grad_norm": 0.2165520506724918, |
| "learning_rate": 5.495978552278821e-06, |
| "loss": 0.2885, |
| "step": 2241 |
| }, |
| { |
| "epoch": 2.704463208685163, |
| "grad_norm": 0.21443900046194145, |
| "learning_rate": 5.473637176050045e-06, |
| "loss": 0.2856, |
| "step": 2242 |
| }, |
| { |
| "epoch": 2.7056694813027744, |
| "grad_norm": 0.21335757069783218, |
| "learning_rate": 5.451295799821269e-06, |
| "loss": 0.2975, |
| "step": 2243 |
| }, |
| { |
| "epoch": 2.706875753920386, |
| "grad_norm": 0.20145288057580185, |
| "learning_rate": 5.428954423592494e-06, |
| "loss": 0.2952, |
| "step": 2244 |
| }, |
| { |
| "epoch": 2.7080820265379977, |
| "grad_norm": 0.22761820406606897, |
| "learning_rate": 5.406613047363718e-06, |
| "loss": 0.2879, |
| "step": 2245 |
| }, |
| { |
| "epoch": 2.709288299155609, |
| "grad_norm": 0.2364015496890641, |
| "learning_rate": 5.384271671134942e-06, |
| "loss": 0.281, |
| "step": 2246 |
| }, |
| { |
| "epoch": 2.7104945717732205, |
| "grad_norm": 0.21753945940012162, |
| "learning_rate": 5.361930294906166e-06, |
| "loss": 0.2964, |
| "step": 2247 |
| }, |
| { |
| "epoch": 2.7117008443908324, |
| "grad_norm": 0.21327033716673088, |
| "learning_rate": 5.33958891867739e-06, |
| "loss": 0.296, |
| "step": 2248 |
| }, |
| { |
| "epoch": 2.712907117008444, |
| "grad_norm": 0.1951078699418731, |
| "learning_rate": 5.317247542448615e-06, |
| "loss": 0.2791, |
| "step": 2249 |
| }, |
| { |
| "epoch": 2.7141133896260556, |
| "grad_norm": 0.20240742137700055, |
| "learning_rate": 5.294906166219839e-06, |
| "loss": 0.2924, |
| "step": 2250 |
| }, |
| { |
| "epoch": 2.715319662243667, |
| "grad_norm": 0.22220578965053128, |
| "learning_rate": 5.2725647899910635e-06, |
| "loss": 0.3065, |
| "step": 2251 |
| }, |
| { |
| "epoch": 2.716525934861279, |
| "grad_norm": 0.20214399599477811, |
| "learning_rate": 5.250223413762288e-06, |
| "loss": 0.2744, |
| "step": 2252 |
| }, |
| { |
| "epoch": 2.7177322074788903, |
| "grad_norm": 0.21155302019530836, |
| "learning_rate": 5.2278820375335125e-06, |
| "loss": 0.2715, |
| "step": 2253 |
| }, |
| { |
| "epoch": 2.7189384800965017, |
| "grad_norm": 0.19608141646891808, |
| "learning_rate": 5.205540661304737e-06, |
| "loss": 0.2781, |
| "step": 2254 |
| }, |
| { |
| "epoch": 2.7201447527141136, |
| "grad_norm": 0.20753070114799987, |
| "learning_rate": 5.1831992850759615e-06, |
| "loss": 0.2807, |
| "step": 2255 |
| }, |
| { |
| "epoch": 2.721351025331725, |
| "grad_norm": 0.19788509046006095, |
| "learning_rate": 5.160857908847186e-06, |
| "loss": 0.2762, |
| "step": 2256 |
| }, |
| { |
| "epoch": 2.7225572979493364, |
| "grad_norm": 0.2066145277274399, |
| "learning_rate": 5.138516532618409e-06, |
| "loss": 0.3045, |
| "step": 2257 |
| }, |
| { |
| "epoch": 2.7237635705669483, |
| "grad_norm": 0.20139286285522892, |
| "learning_rate": 5.116175156389634e-06, |
| "loss": 0.2844, |
| "step": 2258 |
| }, |
| { |
| "epoch": 2.7249698431845597, |
| "grad_norm": 0.20798235148380942, |
| "learning_rate": 5.093833780160858e-06, |
| "loss": 0.2853, |
| "step": 2259 |
| }, |
| { |
| "epoch": 2.726176115802171, |
| "grad_norm": 0.2091974900829975, |
| "learning_rate": 5.071492403932082e-06, |
| "loss": 0.2756, |
| "step": 2260 |
| }, |
| { |
| "epoch": 2.727382388419783, |
| "grad_norm": 0.21745865410514648, |
| "learning_rate": 5.049151027703307e-06, |
| "loss": 0.2894, |
| "step": 2261 |
| }, |
| { |
| "epoch": 2.7285886610373944, |
| "grad_norm": 0.19485099560282385, |
| "learning_rate": 5.026809651474531e-06, |
| "loss": 0.2761, |
| "step": 2262 |
| }, |
| { |
| "epoch": 2.729794933655006, |
| "grad_norm": 0.19149290082897566, |
| "learning_rate": 5.004468275245756e-06, |
| "loss": 0.2888, |
| "step": 2263 |
| }, |
| { |
| "epoch": 2.7310012062726177, |
| "grad_norm": 0.20599673735385637, |
| "learning_rate": 4.98212689901698e-06, |
| "loss": 0.2884, |
| "step": 2264 |
| }, |
| { |
| "epoch": 2.732207478890229, |
| "grad_norm": 0.20713273074776942, |
| "learning_rate": 4.959785522788204e-06, |
| "loss": 0.2874, |
| "step": 2265 |
| }, |
| { |
| "epoch": 2.7334137515078405, |
| "grad_norm": 0.2081817873109111, |
| "learning_rate": 4.937444146559428e-06, |
| "loss": 0.2944, |
| "step": 2266 |
| }, |
| { |
| "epoch": 2.7346200241254524, |
| "grad_norm": 0.20553774261680427, |
| "learning_rate": 4.915102770330652e-06, |
| "loss": 0.2716, |
| "step": 2267 |
| }, |
| { |
| "epoch": 2.7358262967430638, |
| "grad_norm": 0.20501194493617642, |
| "learning_rate": 4.892761394101876e-06, |
| "loss": 0.3012, |
| "step": 2268 |
| }, |
| { |
| "epoch": 2.7370325693606756, |
| "grad_norm": 0.1990487984580353, |
| "learning_rate": 4.870420017873101e-06, |
| "loss": 0.2941, |
| "step": 2269 |
| }, |
| { |
| "epoch": 2.738238841978287, |
| "grad_norm": 0.1983334601951314, |
| "learning_rate": 4.848078641644325e-06, |
| "loss": 0.2753, |
| "step": 2270 |
| }, |
| { |
| "epoch": 2.739445114595899, |
| "grad_norm": 0.19403916283019812, |
| "learning_rate": 4.8257372654155495e-06, |
| "loss": 0.2862, |
| "step": 2271 |
| }, |
| { |
| "epoch": 2.7406513872135103, |
| "grad_norm": 0.21538723639291782, |
| "learning_rate": 4.803395889186774e-06, |
| "loss": 0.2943, |
| "step": 2272 |
| }, |
| { |
| "epoch": 2.7418576598311217, |
| "grad_norm": 0.2375574289392646, |
| "learning_rate": 4.7810545129579985e-06, |
| "loss": 0.2887, |
| "step": 2273 |
| }, |
| { |
| "epoch": 2.7430639324487336, |
| "grad_norm": 0.19945442614951428, |
| "learning_rate": 4.758713136729223e-06, |
| "loss": 0.3011, |
| "step": 2274 |
| }, |
| { |
| "epoch": 2.744270205066345, |
| "grad_norm": 0.20199363951824748, |
| "learning_rate": 4.7363717605004475e-06, |
| "loss": 0.2942, |
| "step": 2275 |
| }, |
| { |
| "epoch": 2.7454764776839564, |
| "grad_norm": 0.2059245247625709, |
| "learning_rate": 4.714030384271672e-06, |
| "loss": 0.2876, |
| "step": 2276 |
| }, |
| { |
| "epoch": 2.7466827503015683, |
| "grad_norm": 0.20187465821580058, |
| "learning_rate": 4.691689008042896e-06, |
| "loss": 0.2844, |
| "step": 2277 |
| }, |
| { |
| "epoch": 2.7478890229191797, |
| "grad_norm": 0.20636177253183094, |
| "learning_rate": 4.66934763181412e-06, |
| "loss": 0.2796, |
| "step": 2278 |
| }, |
| { |
| "epoch": 2.749095295536791, |
| "grad_norm": 0.20723033389487827, |
| "learning_rate": 4.647006255585344e-06, |
| "loss": 0.2767, |
| "step": 2279 |
| }, |
| { |
| "epoch": 2.750301568154403, |
| "grad_norm": 0.21174347393266027, |
| "learning_rate": 4.624664879356569e-06, |
| "loss": 0.3188, |
| "step": 2280 |
| }, |
| { |
| "epoch": 2.7515078407720144, |
| "grad_norm": 0.1978681060454767, |
| "learning_rate": 4.602323503127793e-06, |
| "loss": 0.2784, |
| "step": 2281 |
| }, |
| { |
| "epoch": 2.752714113389626, |
| "grad_norm": 0.18744202466276072, |
| "learning_rate": 4.579982126899017e-06, |
| "loss": 0.2682, |
| "step": 2282 |
| }, |
| { |
| "epoch": 2.7539203860072377, |
| "grad_norm": 0.21209688003204358, |
| "learning_rate": 4.557640750670242e-06, |
| "loss": 0.285, |
| "step": 2283 |
| }, |
| { |
| "epoch": 2.755126658624849, |
| "grad_norm": 0.19892738867053703, |
| "learning_rate": 4.535299374441466e-06, |
| "loss": 0.2714, |
| "step": 2284 |
| }, |
| { |
| "epoch": 2.756332931242461, |
| "grad_norm": 0.2041608394139942, |
| "learning_rate": 4.51295799821269e-06, |
| "loss": 0.2913, |
| "step": 2285 |
| }, |
| { |
| "epoch": 2.7575392038600723, |
| "grad_norm": 0.19915952593238026, |
| "learning_rate": 4.490616621983915e-06, |
| "loss": 0.274, |
| "step": 2286 |
| }, |
| { |
| "epoch": 2.758745476477684, |
| "grad_norm": 0.2044806484365547, |
| "learning_rate": 4.468275245755138e-06, |
| "loss": 0.2853, |
| "step": 2287 |
| }, |
| { |
| "epoch": 2.7599517490952956, |
| "grad_norm": 0.19573371237314433, |
| "learning_rate": 4.445933869526362e-06, |
| "loss": 0.2738, |
| "step": 2288 |
| }, |
| { |
| "epoch": 2.761158021712907, |
| "grad_norm": 0.2106092030662992, |
| "learning_rate": 4.423592493297587e-06, |
| "loss": 0.2972, |
| "step": 2289 |
| }, |
| { |
| "epoch": 2.762364294330519, |
| "grad_norm": 0.19573259291033523, |
| "learning_rate": 4.401251117068811e-06, |
| "loss": 0.2829, |
| "step": 2290 |
| }, |
| { |
| "epoch": 2.7635705669481303, |
| "grad_norm": 0.20564658398999378, |
| "learning_rate": 4.3789097408400355e-06, |
| "loss": 0.2976, |
| "step": 2291 |
| }, |
| { |
| "epoch": 2.7647768395657417, |
| "grad_norm": 0.1879596479464295, |
| "learning_rate": 4.35656836461126e-06, |
| "loss": 0.2705, |
| "step": 2292 |
| }, |
| { |
| "epoch": 2.7659831121833536, |
| "grad_norm": 0.20028084619699768, |
| "learning_rate": 4.3342269883824845e-06, |
| "loss": 0.2898, |
| "step": 2293 |
| }, |
| { |
| "epoch": 2.767189384800965, |
| "grad_norm": 0.204373079980006, |
| "learning_rate": 4.3118856121537094e-06, |
| "loss": 0.2854, |
| "step": 2294 |
| }, |
| { |
| "epoch": 2.7683956574185764, |
| "grad_norm": 0.19758717833265702, |
| "learning_rate": 4.2895442359249335e-06, |
| "loss": 0.2802, |
| "step": 2295 |
| }, |
| { |
| "epoch": 2.7696019300361883, |
| "grad_norm": 0.21023108969069418, |
| "learning_rate": 4.267202859696158e-06, |
| "loss": 0.284, |
| "step": 2296 |
| }, |
| { |
| "epoch": 2.7708082026537997, |
| "grad_norm": 0.19027759880066697, |
| "learning_rate": 4.244861483467382e-06, |
| "loss": 0.2821, |
| "step": 2297 |
| }, |
| { |
| "epoch": 2.772014475271411, |
| "grad_norm": 0.18871657043933537, |
| "learning_rate": 4.222520107238606e-06, |
| "loss": 0.2729, |
| "step": 2298 |
| }, |
| { |
| "epoch": 2.773220747889023, |
| "grad_norm": 0.20709511269571762, |
| "learning_rate": 4.20017873100983e-06, |
| "loss": 0.2768, |
| "step": 2299 |
| }, |
| { |
| "epoch": 2.7744270205066344, |
| "grad_norm": 0.20297218284984314, |
| "learning_rate": 4.177837354781055e-06, |
| "loss": 0.2869, |
| "step": 2300 |
| }, |
| { |
| "epoch": 2.7756332931242462, |
| "grad_norm": 0.19887378780810883, |
| "learning_rate": 4.155495978552279e-06, |
| "loss": 0.3008, |
| "step": 2301 |
| }, |
| { |
| "epoch": 2.7768395657418576, |
| "grad_norm": 0.2118503034579533, |
| "learning_rate": 4.133154602323503e-06, |
| "loss": 0.2828, |
| "step": 2302 |
| }, |
| { |
| "epoch": 2.7780458383594695, |
| "grad_norm": 0.2219706988384966, |
| "learning_rate": 4.110813226094728e-06, |
| "loss": 0.3062, |
| "step": 2303 |
| }, |
| { |
| "epoch": 2.779252110977081, |
| "grad_norm": 0.25701330993635146, |
| "learning_rate": 4.088471849865952e-06, |
| "loss": 0.2745, |
| "step": 2304 |
| }, |
| { |
| "epoch": 2.7804583835946923, |
| "grad_norm": 0.2234143696898838, |
| "learning_rate": 4.066130473637176e-06, |
| "loss": 0.291, |
| "step": 2305 |
| }, |
| { |
| "epoch": 2.781664656212304, |
| "grad_norm": 0.1941194837681158, |
| "learning_rate": 4.043789097408401e-06, |
| "loss": 0.2877, |
| "step": 2306 |
| }, |
| { |
| "epoch": 2.7828709288299156, |
| "grad_norm": 0.2064300988757331, |
| "learning_rate": 4.021447721179625e-06, |
| "loss": 0.2884, |
| "step": 2307 |
| }, |
| { |
| "epoch": 2.784077201447527, |
| "grad_norm": 0.21140547687570724, |
| "learning_rate": 3.999106344950849e-06, |
| "loss": 0.298, |
| "step": 2308 |
| }, |
| { |
| "epoch": 2.785283474065139, |
| "grad_norm": 0.19951947203291173, |
| "learning_rate": 3.976764968722073e-06, |
| "loss": 0.2699, |
| "step": 2309 |
| }, |
| { |
| "epoch": 2.7864897466827503, |
| "grad_norm": 0.20431837354268684, |
| "learning_rate": 3.954423592493297e-06, |
| "loss": 0.2865, |
| "step": 2310 |
| }, |
| { |
| "epoch": 2.7876960193003617, |
| "grad_norm": 0.19834889240552475, |
| "learning_rate": 3.932082216264522e-06, |
| "loss": 0.2905, |
| "step": 2311 |
| }, |
| { |
| "epoch": 2.7889022919179736, |
| "grad_norm": 0.21695363502888282, |
| "learning_rate": 3.909740840035746e-06, |
| "loss": 0.2763, |
| "step": 2312 |
| }, |
| { |
| "epoch": 2.790108564535585, |
| "grad_norm": 0.19273556462979177, |
| "learning_rate": 3.8873994638069705e-06, |
| "loss": 0.2774, |
| "step": 2313 |
| }, |
| { |
| "epoch": 2.7913148371531964, |
| "grad_norm": 0.19451733876401245, |
| "learning_rate": 3.8650580875781954e-06, |
| "loss": 0.29, |
| "step": 2314 |
| }, |
| { |
| "epoch": 2.7925211097708083, |
| "grad_norm": 0.1964077558155501, |
| "learning_rate": 3.8427167113494195e-06, |
| "loss": 0.2802, |
| "step": 2315 |
| }, |
| { |
| "epoch": 2.7937273823884197, |
| "grad_norm": 0.21179533332456585, |
| "learning_rate": 3.820375335120644e-06, |
| "loss": 0.3013, |
| "step": 2316 |
| }, |
| { |
| "epoch": 2.7949336550060315, |
| "grad_norm": 0.19720588063541614, |
| "learning_rate": 3.798033958891868e-06, |
| "loss": 0.2679, |
| "step": 2317 |
| }, |
| { |
| "epoch": 2.796139927623643, |
| "grad_norm": 0.20036859315386968, |
| "learning_rate": 3.7756925826630922e-06, |
| "loss": 0.2951, |
| "step": 2318 |
| }, |
| { |
| "epoch": 2.797346200241255, |
| "grad_norm": 0.205713796416112, |
| "learning_rate": 3.7533512064343163e-06, |
| "loss": 0.2753, |
| "step": 2319 |
| }, |
| { |
| "epoch": 2.798552472858866, |
| "grad_norm": 0.2086376396974652, |
| "learning_rate": 3.731009830205541e-06, |
| "loss": 0.2806, |
| "step": 2320 |
| }, |
| { |
| "epoch": 2.7997587454764776, |
| "grad_norm": 0.19776511558296467, |
| "learning_rate": 3.708668453976765e-06, |
| "loss": 0.2825, |
| "step": 2321 |
| }, |
| { |
| "epoch": 2.8009650180940895, |
| "grad_norm": 0.1979973932225097, |
| "learning_rate": 3.686327077747989e-06, |
| "loss": 0.2925, |
| "step": 2322 |
| }, |
| { |
| "epoch": 2.802171290711701, |
| "grad_norm": 0.20109286837062904, |
| "learning_rate": 3.663985701519214e-06, |
| "loss": 0.2994, |
| "step": 2323 |
| }, |
| { |
| "epoch": 2.8033775633293123, |
| "grad_norm": 0.19996893427115703, |
| "learning_rate": 3.641644325290438e-06, |
| "loss": 0.2603, |
| "step": 2324 |
| }, |
| { |
| "epoch": 2.804583835946924, |
| "grad_norm": 0.2156601549234228, |
| "learning_rate": 3.6193029490616625e-06, |
| "loss": 0.2892, |
| "step": 2325 |
| }, |
| { |
| "epoch": 2.8057901085645356, |
| "grad_norm": 0.19954283493636063, |
| "learning_rate": 3.5969615728328866e-06, |
| "loss": 0.2848, |
| "step": 2326 |
| }, |
| { |
| "epoch": 2.806996381182147, |
| "grad_norm": 0.20103437872062893, |
| "learning_rate": 3.5746201966041107e-06, |
| "loss": 0.2802, |
| "step": 2327 |
| }, |
| { |
| "epoch": 2.808202653799759, |
| "grad_norm": 0.19973565351335576, |
| "learning_rate": 3.5522788203753356e-06, |
| "loss": 0.284, |
| "step": 2328 |
| }, |
| { |
| "epoch": 2.8094089264173703, |
| "grad_norm": 0.20898595146924076, |
| "learning_rate": 3.5299374441465597e-06, |
| "loss": 0.2866, |
| "step": 2329 |
| }, |
| { |
| "epoch": 2.8106151990349817, |
| "grad_norm": 0.19573103856679328, |
| "learning_rate": 3.507596067917784e-06, |
| "loss": 0.2876, |
| "step": 2330 |
| }, |
| { |
| "epoch": 2.8118214716525936, |
| "grad_norm": 0.2077731765783218, |
| "learning_rate": 3.4852546916890083e-06, |
| "loss": 0.2956, |
| "step": 2331 |
| }, |
| { |
| "epoch": 2.813027744270205, |
| "grad_norm": 0.2016470923946435, |
| "learning_rate": 3.4629133154602324e-06, |
| "loss": 0.2948, |
| "step": 2332 |
| }, |
| { |
| "epoch": 2.8142340168878164, |
| "grad_norm": 0.2044886910285202, |
| "learning_rate": 3.4405719392314565e-06, |
| "loss": 0.3065, |
| "step": 2333 |
| }, |
| { |
| "epoch": 2.8154402895054282, |
| "grad_norm": 0.20278178377461592, |
| "learning_rate": 3.4182305630026814e-06, |
| "loss": 0.2805, |
| "step": 2334 |
| }, |
| { |
| "epoch": 2.8166465621230397, |
| "grad_norm": 0.20087995640692985, |
| "learning_rate": 3.3958891867739055e-06, |
| "loss": 0.2733, |
| "step": 2335 |
| }, |
| { |
| "epoch": 2.8178528347406515, |
| "grad_norm": 0.20454242639756476, |
| "learning_rate": 3.3735478105451296e-06, |
| "loss": 0.2947, |
| "step": 2336 |
| }, |
| { |
| "epoch": 2.819059107358263, |
| "grad_norm": 0.19624397685889447, |
| "learning_rate": 3.351206434316354e-06, |
| "loss": 0.2792, |
| "step": 2337 |
| }, |
| { |
| "epoch": 2.820265379975875, |
| "grad_norm": 0.1918852390942654, |
| "learning_rate": 3.3288650580875782e-06, |
| "loss": 0.2746, |
| "step": 2338 |
| }, |
| { |
| "epoch": 2.821471652593486, |
| "grad_norm": 0.19264796239668547, |
| "learning_rate": 3.3065236818588023e-06, |
| "loss": 0.2988, |
| "step": 2339 |
| }, |
| { |
| "epoch": 2.8226779252110976, |
| "grad_norm": 0.19970750561027092, |
| "learning_rate": 3.2841823056300272e-06, |
| "loss": 0.2843, |
| "step": 2340 |
| }, |
| { |
| "epoch": 2.8238841978287095, |
| "grad_norm": 0.19496241921024737, |
| "learning_rate": 3.2618409294012513e-06, |
| "loss": 0.2815, |
| "step": 2341 |
| }, |
| { |
| "epoch": 2.825090470446321, |
| "grad_norm": 0.21303286652759756, |
| "learning_rate": 3.239499553172476e-06, |
| "loss": 0.2979, |
| "step": 2342 |
| }, |
| { |
| "epoch": 2.8262967430639323, |
| "grad_norm": 0.1966256591701248, |
| "learning_rate": 3.2171581769437e-06, |
| "loss": 0.2733, |
| "step": 2343 |
| }, |
| { |
| "epoch": 2.827503015681544, |
| "grad_norm": 0.19157269512914996, |
| "learning_rate": 3.194816800714924e-06, |
| "loss": 0.284, |
| "step": 2344 |
| }, |
| { |
| "epoch": 2.8287092882991556, |
| "grad_norm": 0.20806116030817426, |
| "learning_rate": 3.172475424486149e-06, |
| "loss": 0.2771, |
| "step": 2345 |
| }, |
| { |
| "epoch": 2.829915560916767, |
| "grad_norm": 0.20393659922514393, |
| "learning_rate": 3.1501340482573726e-06, |
| "loss": 0.2842, |
| "step": 2346 |
| }, |
| { |
| "epoch": 2.831121833534379, |
| "grad_norm": 0.20545574063430777, |
| "learning_rate": 3.1277926720285967e-06, |
| "loss": 0.2999, |
| "step": 2347 |
| }, |
| { |
| "epoch": 2.8323281061519903, |
| "grad_norm": 0.20800833266552515, |
| "learning_rate": 3.1054512957998212e-06, |
| "loss": 0.3056, |
| "step": 2348 |
| }, |
| { |
| "epoch": 2.8335343787696017, |
| "grad_norm": 0.1920188003389075, |
| "learning_rate": 3.0831099195710457e-06, |
| "loss": 0.2801, |
| "step": 2349 |
| }, |
| { |
| "epoch": 2.8347406513872135, |
| "grad_norm": 0.1956810201413982, |
| "learning_rate": 3.0607685433422703e-06, |
| "loss": 0.3025, |
| "step": 2350 |
| }, |
| { |
| "epoch": 2.835946924004825, |
| "grad_norm": 0.20137028715015345, |
| "learning_rate": 3.0384271671134943e-06, |
| "loss": 0.3159, |
| "step": 2351 |
| }, |
| { |
| "epoch": 2.837153196622437, |
| "grad_norm": 0.21523120752022207, |
| "learning_rate": 3.0160857908847184e-06, |
| "loss": 0.3052, |
| "step": 2352 |
| }, |
| { |
| "epoch": 2.8383594692400482, |
| "grad_norm": 0.1855207216753987, |
| "learning_rate": 2.993744414655943e-06, |
| "loss": 0.2837, |
| "step": 2353 |
| }, |
| { |
| "epoch": 2.83956574185766, |
| "grad_norm": 0.2094086834658496, |
| "learning_rate": 2.9714030384271675e-06, |
| "loss": 0.2967, |
| "step": 2354 |
| }, |
| { |
| "epoch": 2.8407720144752715, |
| "grad_norm": 0.19794775999629896, |
| "learning_rate": 2.9490616621983915e-06, |
| "loss": 0.2842, |
| "step": 2355 |
| }, |
| { |
| "epoch": 2.841978287092883, |
| "grad_norm": 0.18988974466292338, |
| "learning_rate": 2.926720285969616e-06, |
| "loss": 0.2775, |
| "step": 2356 |
| }, |
| { |
| "epoch": 2.8431845597104948, |
| "grad_norm": 0.19913264082126989, |
| "learning_rate": 2.90437890974084e-06, |
| "loss": 0.2816, |
| "step": 2357 |
| }, |
| { |
| "epoch": 2.844390832328106, |
| "grad_norm": 0.19113590403389297, |
| "learning_rate": 2.8820375335120642e-06, |
| "loss": 0.2882, |
| "step": 2358 |
| }, |
| { |
| "epoch": 2.8455971049457176, |
| "grad_norm": 0.20391500027186676, |
| "learning_rate": 2.8596961572832887e-06, |
| "loss": 0.287, |
| "step": 2359 |
| }, |
| { |
| "epoch": 2.8468033775633295, |
| "grad_norm": 0.21140527601885406, |
| "learning_rate": 2.8373547810545133e-06, |
| "loss": 0.2966, |
| "step": 2360 |
| }, |
| { |
| "epoch": 2.848009650180941, |
| "grad_norm": 0.20533216153271688, |
| "learning_rate": 2.8150134048257373e-06, |
| "loss": 0.2924, |
| "step": 2361 |
| }, |
| { |
| "epoch": 2.8492159227985523, |
| "grad_norm": 0.20311908970852507, |
| "learning_rate": 2.7926720285969614e-06, |
| "loss": 0.3017, |
| "step": 2362 |
| }, |
| { |
| "epoch": 2.850422195416164, |
| "grad_norm": 0.19773289611540928, |
| "learning_rate": 2.770330652368186e-06, |
| "loss": 0.2833, |
| "step": 2363 |
| }, |
| { |
| "epoch": 2.8516284680337756, |
| "grad_norm": 0.2062842210490875, |
| "learning_rate": 2.7479892761394105e-06, |
| "loss": 0.293, |
| "step": 2364 |
| }, |
| { |
| "epoch": 2.852834740651387, |
| "grad_norm": 0.19703442031240742, |
| "learning_rate": 2.7256478999106345e-06, |
| "loss": 0.2901, |
| "step": 2365 |
| }, |
| { |
| "epoch": 2.854041013268999, |
| "grad_norm": 0.1979587715787965, |
| "learning_rate": 2.703306523681859e-06, |
| "loss": 0.2848, |
| "step": 2366 |
| }, |
| { |
| "epoch": 2.8552472858866103, |
| "grad_norm": 0.19236896429290468, |
| "learning_rate": 2.680965147453083e-06, |
| "loss": 0.2792, |
| "step": 2367 |
| }, |
| { |
| "epoch": 2.856453558504222, |
| "grad_norm": 0.19185791310903008, |
| "learning_rate": 2.6586237712243077e-06, |
| "loss": 0.2857, |
| "step": 2368 |
| }, |
| { |
| "epoch": 2.8576598311218335, |
| "grad_norm": 0.20097778464317417, |
| "learning_rate": 2.6362823949955317e-06, |
| "loss": 0.2912, |
| "step": 2369 |
| }, |
| { |
| "epoch": 2.8588661037394454, |
| "grad_norm": 0.2055714517459925, |
| "learning_rate": 2.6139410187667563e-06, |
| "loss": 0.2997, |
| "step": 2370 |
| }, |
| { |
| "epoch": 2.860072376357057, |
| "grad_norm": 0.21732786429246168, |
| "learning_rate": 2.5915996425379808e-06, |
| "loss": 0.3055, |
| "step": 2371 |
| }, |
| { |
| "epoch": 2.861278648974668, |
| "grad_norm": 0.2502005807261682, |
| "learning_rate": 2.5692582663092044e-06, |
| "loss": 0.2856, |
| "step": 2372 |
| }, |
| { |
| "epoch": 2.86248492159228, |
| "grad_norm": 0.19987073929777327, |
| "learning_rate": 2.546916890080429e-06, |
| "loss": 0.2814, |
| "step": 2373 |
| }, |
| { |
| "epoch": 2.8636911942098915, |
| "grad_norm": 0.21371971461929823, |
| "learning_rate": 2.5245755138516535e-06, |
| "loss": 0.2895, |
| "step": 2374 |
| }, |
| { |
| "epoch": 2.864897466827503, |
| "grad_norm": 0.1947082976853003, |
| "learning_rate": 2.502234137622878e-06, |
| "loss": 0.2793, |
| "step": 2375 |
| }, |
| { |
| "epoch": 2.8661037394451148, |
| "grad_norm": 0.2020722173392859, |
| "learning_rate": 2.479892761394102e-06, |
| "loss": 0.2902, |
| "step": 2376 |
| }, |
| { |
| "epoch": 2.867310012062726, |
| "grad_norm": 0.19209835857990548, |
| "learning_rate": 2.457551385165326e-06, |
| "loss": 0.2899, |
| "step": 2377 |
| }, |
| { |
| "epoch": 2.8685162846803376, |
| "grad_norm": 0.20529591382680887, |
| "learning_rate": 2.4352100089365507e-06, |
| "loss": 0.2825, |
| "step": 2378 |
| }, |
| { |
| "epoch": 2.8697225572979495, |
| "grad_norm": 0.20486394337649172, |
| "learning_rate": 2.4128686327077747e-06, |
| "loss": 0.2881, |
| "step": 2379 |
| }, |
| { |
| "epoch": 2.870928829915561, |
| "grad_norm": 0.20950607820875253, |
| "learning_rate": 2.3905272564789993e-06, |
| "loss": 0.2991, |
| "step": 2380 |
| }, |
| { |
| "epoch": 2.8721351025331723, |
| "grad_norm": 0.1908385872610009, |
| "learning_rate": 2.3681858802502238e-06, |
| "loss": 0.2861, |
| "step": 2381 |
| }, |
| { |
| "epoch": 2.873341375150784, |
| "grad_norm": 0.19922508299237268, |
| "learning_rate": 2.345844504021448e-06, |
| "loss": 0.2855, |
| "step": 2382 |
| }, |
| { |
| "epoch": 2.8745476477683956, |
| "grad_norm": 0.22166082316021662, |
| "learning_rate": 2.323503127792672e-06, |
| "loss": 0.2946, |
| "step": 2383 |
| }, |
| { |
| "epoch": 2.875753920386007, |
| "grad_norm": 0.19461242460581957, |
| "learning_rate": 2.3011617515638965e-06, |
| "loss": 0.2714, |
| "step": 2384 |
| }, |
| { |
| "epoch": 2.876960193003619, |
| "grad_norm": 0.19352881947289724, |
| "learning_rate": 2.278820375335121e-06, |
| "loss": 0.2721, |
| "step": 2385 |
| }, |
| { |
| "epoch": 2.8781664656212302, |
| "grad_norm": 0.22058385576253617, |
| "learning_rate": 2.256478999106345e-06, |
| "loss": 0.2794, |
| "step": 2386 |
| }, |
| { |
| "epoch": 2.879372738238842, |
| "grad_norm": 0.18948954109249133, |
| "learning_rate": 2.234137622877569e-06, |
| "loss": 0.2827, |
| "step": 2387 |
| }, |
| { |
| "epoch": 2.8805790108564535, |
| "grad_norm": 0.1901617667242354, |
| "learning_rate": 2.2117962466487937e-06, |
| "loss": 0.293, |
| "step": 2388 |
| }, |
| { |
| "epoch": 2.8817852834740654, |
| "grad_norm": 0.19227085609237324, |
| "learning_rate": 2.1894548704200177e-06, |
| "loss": 0.2796, |
| "step": 2389 |
| }, |
| { |
| "epoch": 2.882991556091677, |
| "grad_norm": 0.19896604225955727, |
| "learning_rate": 2.1671134941912423e-06, |
| "loss": 0.281, |
| "step": 2390 |
| }, |
| { |
| "epoch": 2.884197828709288, |
| "grad_norm": 0.1969715058172239, |
| "learning_rate": 2.1447721179624668e-06, |
| "loss": 0.2904, |
| "step": 2391 |
| }, |
| { |
| "epoch": 2.8854041013269, |
| "grad_norm": 0.2008657406945197, |
| "learning_rate": 2.122430741733691e-06, |
| "loss": 0.2803, |
| "step": 2392 |
| }, |
| { |
| "epoch": 2.8866103739445115, |
| "grad_norm": 0.2107471673459275, |
| "learning_rate": 2.100089365504915e-06, |
| "loss": 0.2879, |
| "step": 2393 |
| }, |
| { |
| "epoch": 2.887816646562123, |
| "grad_norm": 0.19150770147349974, |
| "learning_rate": 2.0777479892761395e-06, |
| "loss": 0.291, |
| "step": 2394 |
| }, |
| { |
| "epoch": 2.8890229191797347, |
| "grad_norm": 0.2041712230658877, |
| "learning_rate": 2.055406613047364e-06, |
| "loss": 0.2874, |
| "step": 2395 |
| }, |
| { |
| "epoch": 2.890229191797346, |
| "grad_norm": 0.19970618525293937, |
| "learning_rate": 2.033065236818588e-06, |
| "loss": 0.2895, |
| "step": 2396 |
| }, |
| { |
| "epoch": 2.8914354644149576, |
| "grad_norm": 0.21995699586382159, |
| "learning_rate": 2.0107238605898126e-06, |
| "loss": 0.2929, |
| "step": 2397 |
| }, |
| { |
| "epoch": 2.8926417370325694, |
| "grad_norm": 0.1973616517335882, |
| "learning_rate": 1.9883824843610367e-06, |
| "loss": 0.2936, |
| "step": 2398 |
| }, |
| { |
| "epoch": 2.893848009650181, |
| "grad_norm": 0.1989740318792929, |
| "learning_rate": 1.966041108132261e-06, |
| "loss": 0.2861, |
| "step": 2399 |
| }, |
| { |
| "epoch": 2.8950542822677923, |
| "grad_norm": 0.2112403964468139, |
| "learning_rate": 1.9436997319034853e-06, |
| "loss": 0.2825, |
| "step": 2400 |
| }, |
| { |
| "epoch": 2.896260554885404, |
| "grad_norm": 0.20828664919750745, |
| "learning_rate": 1.9213583556747098e-06, |
| "loss": 0.281, |
| "step": 2401 |
| }, |
| { |
| "epoch": 2.8974668275030155, |
| "grad_norm": 0.1955339081984292, |
| "learning_rate": 1.899016979445934e-06, |
| "loss": 0.2768, |
| "step": 2402 |
| }, |
| { |
| "epoch": 2.8986731001206274, |
| "grad_norm": 0.22204369188455533, |
| "learning_rate": 1.8766756032171582e-06, |
| "loss": 0.2874, |
| "step": 2403 |
| }, |
| { |
| "epoch": 2.899879372738239, |
| "grad_norm": 0.2077252151848404, |
| "learning_rate": 1.8543342269883825e-06, |
| "loss": 0.2991, |
| "step": 2404 |
| }, |
| { |
| "epoch": 2.9010856453558507, |
| "grad_norm": 0.21435787298444822, |
| "learning_rate": 1.831992850759607e-06, |
| "loss": 0.2943, |
| "step": 2405 |
| }, |
| { |
| "epoch": 2.902291917973462, |
| "grad_norm": 0.1911919947784039, |
| "learning_rate": 1.8096514745308313e-06, |
| "loss": 0.2815, |
| "step": 2406 |
| }, |
| { |
| "epoch": 2.9034981905910735, |
| "grad_norm": 0.18827809153347827, |
| "learning_rate": 1.7873100983020554e-06, |
| "loss": 0.2752, |
| "step": 2407 |
| }, |
| { |
| "epoch": 2.9047044632086854, |
| "grad_norm": 0.18931406811301763, |
| "learning_rate": 1.7649687220732799e-06, |
| "loss": 0.2853, |
| "step": 2408 |
| }, |
| { |
| "epoch": 2.9059107358262968, |
| "grad_norm": 0.22505141785294047, |
| "learning_rate": 1.7426273458445042e-06, |
| "loss": 0.2881, |
| "step": 2409 |
| }, |
| { |
| "epoch": 2.907117008443908, |
| "grad_norm": 0.21027595141026514, |
| "learning_rate": 1.7202859696157283e-06, |
| "loss": 0.2902, |
| "step": 2410 |
| }, |
| { |
| "epoch": 2.90832328106152, |
| "grad_norm": 0.20210659855206092, |
| "learning_rate": 1.6979445933869528e-06, |
| "loss": 0.2831, |
| "step": 2411 |
| }, |
| { |
| "epoch": 2.9095295536791315, |
| "grad_norm": 0.1842267253698121, |
| "learning_rate": 1.675603217158177e-06, |
| "loss": 0.2854, |
| "step": 2412 |
| }, |
| { |
| "epoch": 2.910735826296743, |
| "grad_norm": 0.2091748826161693, |
| "learning_rate": 1.6532618409294012e-06, |
| "loss": 0.2783, |
| "step": 2413 |
| }, |
| { |
| "epoch": 2.9119420989143547, |
| "grad_norm": 0.19331995813160882, |
| "learning_rate": 1.6309204647006257e-06, |
| "loss": 0.2742, |
| "step": 2414 |
| }, |
| { |
| "epoch": 2.913148371531966, |
| "grad_norm": 0.20789168735658, |
| "learning_rate": 1.60857908847185e-06, |
| "loss": 0.2834, |
| "step": 2415 |
| }, |
| { |
| "epoch": 2.9143546441495776, |
| "grad_norm": 0.22504179670692293, |
| "learning_rate": 1.5862377122430745e-06, |
| "loss": 0.2851, |
| "step": 2416 |
| }, |
| { |
| "epoch": 2.9155609167671894, |
| "grad_norm": 0.20044801055007996, |
| "learning_rate": 1.5638963360142984e-06, |
| "loss": 0.2793, |
| "step": 2417 |
| }, |
| { |
| "epoch": 2.916767189384801, |
| "grad_norm": 0.22060197936804501, |
| "learning_rate": 1.5415549597855229e-06, |
| "loss": 0.3086, |
| "step": 2418 |
| }, |
| { |
| "epoch": 2.9179734620024127, |
| "grad_norm": 0.21133137826378076, |
| "learning_rate": 1.5192135835567472e-06, |
| "loss": 0.2909, |
| "step": 2419 |
| }, |
| { |
| "epoch": 2.919179734620024, |
| "grad_norm": 0.19782064257474333, |
| "learning_rate": 1.4968722073279715e-06, |
| "loss": 0.278, |
| "step": 2420 |
| }, |
| { |
| "epoch": 2.920386007237636, |
| "grad_norm": 0.19534968132532227, |
| "learning_rate": 1.4745308310991958e-06, |
| "loss": 0.294, |
| "step": 2421 |
| }, |
| { |
| "epoch": 2.9215922798552474, |
| "grad_norm": 0.19077219973811435, |
| "learning_rate": 1.45218945487042e-06, |
| "loss": 0.2649, |
| "step": 2422 |
| }, |
| { |
| "epoch": 2.922798552472859, |
| "grad_norm": 0.18896064658647335, |
| "learning_rate": 1.4298480786416444e-06, |
| "loss": 0.2791, |
| "step": 2423 |
| }, |
| { |
| "epoch": 2.9240048250904707, |
| "grad_norm": 0.20355436076924752, |
| "learning_rate": 1.4075067024128687e-06, |
| "loss": 0.2912, |
| "step": 2424 |
| }, |
| { |
| "epoch": 2.925211097708082, |
| "grad_norm": 0.19105724867358181, |
| "learning_rate": 1.385165326184093e-06, |
| "loss": 0.281, |
| "step": 2425 |
| }, |
| { |
| "epoch": 2.9264173703256935, |
| "grad_norm": 0.1943840621294346, |
| "learning_rate": 1.3628239499553173e-06, |
| "loss": 0.2786, |
| "step": 2426 |
| }, |
| { |
| "epoch": 2.9276236429433053, |
| "grad_norm": 0.19061678832395212, |
| "learning_rate": 1.3404825737265416e-06, |
| "loss": 0.2776, |
| "step": 2427 |
| }, |
| { |
| "epoch": 2.9288299155609168, |
| "grad_norm": 0.20255117048830434, |
| "learning_rate": 1.3181411974977659e-06, |
| "loss": 0.2847, |
| "step": 2428 |
| }, |
| { |
| "epoch": 2.930036188178528, |
| "grad_norm": 0.20576627278355886, |
| "learning_rate": 1.2957998212689904e-06, |
| "loss": 0.3018, |
| "step": 2429 |
| }, |
| { |
| "epoch": 2.93124246079614, |
| "grad_norm": 0.20319570033218337, |
| "learning_rate": 1.2734584450402145e-06, |
| "loss": 0.2939, |
| "step": 2430 |
| }, |
| { |
| "epoch": 2.9324487334137515, |
| "grad_norm": 0.18540791897808953, |
| "learning_rate": 1.251117068811439e-06, |
| "loss": 0.2818, |
| "step": 2431 |
| }, |
| { |
| "epoch": 2.933655006031363, |
| "grad_norm": 0.18714821054278172, |
| "learning_rate": 1.228775692582663e-06, |
| "loss": 0.2899, |
| "step": 2432 |
| }, |
| { |
| "epoch": 2.9348612786489747, |
| "grad_norm": 0.19047448250427157, |
| "learning_rate": 1.2064343163538874e-06, |
| "loss": 0.2938, |
| "step": 2433 |
| }, |
| { |
| "epoch": 2.936067551266586, |
| "grad_norm": 0.1808164949151054, |
| "learning_rate": 1.1840929401251119e-06, |
| "loss": 0.273, |
| "step": 2434 |
| }, |
| { |
| "epoch": 2.9372738238841976, |
| "grad_norm": 0.18878761414697054, |
| "learning_rate": 1.161751563896336e-06, |
| "loss": 0.2898, |
| "step": 2435 |
| }, |
| { |
| "epoch": 2.9384800965018094, |
| "grad_norm": 0.20432487218307782, |
| "learning_rate": 1.1394101876675605e-06, |
| "loss": 0.2892, |
| "step": 2436 |
| }, |
| { |
| "epoch": 2.939686369119421, |
| "grad_norm": 0.18870479397965104, |
| "learning_rate": 1.1170688114387846e-06, |
| "loss": 0.2758, |
| "step": 2437 |
| }, |
| { |
| "epoch": 2.9408926417370327, |
| "grad_norm": 0.19391649412908152, |
| "learning_rate": 1.0947274352100089e-06, |
| "loss": 0.2812, |
| "step": 2438 |
| }, |
| { |
| "epoch": 2.942098914354644, |
| "grad_norm": 0.19901047358991764, |
| "learning_rate": 1.0723860589812334e-06, |
| "loss": 0.2915, |
| "step": 2439 |
| }, |
| { |
| "epoch": 2.943305186972256, |
| "grad_norm": 0.19947394557395554, |
| "learning_rate": 1.0500446827524575e-06, |
| "loss": 0.2731, |
| "step": 2440 |
| }, |
| { |
| "epoch": 2.9445114595898674, |
| "grad_norm": 0.1921348495014182, |
| "learning_rate": 1.027703306523682e-06, |
| "loss": 0.28, |
| "step": 2441 |
| }, |
| { |
| "epoch": 2.945717732207479, |
| "grad_norm": 0.19667583970583413, |
| "learning_rate": 1.0053619302949063e-06, |
| "loss": 0.2927, |
| "step": 2442 |
| }, |
| { |
| "epoch": 2.9469240048250906, |
| "grad_norm": 0.1875571885355997, |
| "learning_rate": 9.830205540661306e-07, |
| "loss": 0.2814, |
| "step": 2443 |
| }, |
| { |
| "epoch": 2.948130277442702, |
| "grad_norm": 0.19359347961380186, |
| "learning_rate": 9.606791778373549e-07, |
| "loss": 0.2836, |
| "step": 2444 |
| }, |
| { |
| "epoch": 2.9493365500603135, |
| "grad_norm": 0.1948502151304172, |
| "learning_rate": 9.383378016085791e-07, |
| "loss": 0.2765, |
| "step": 2445 |
| }, |
| { |
| "epoch": 2.9505428226779253, |
| "grad_norm": 0.1986095387977726, |
| "learning_rate": 9.159964253798035e-07, |
| "loss": 0.2939, |
| "step": 2446 |
| }, |
| { |
| "epoch": 2.9517490952955368, |
| "grad_norm": 0.20106803721894279, |
| "learning_rate": 8.936550491510277e-07, |
| "loss": 0.2807, |
| "step": 2447 |
| }, |
| { |
| "epoch": 2.952955367913148, |
| "grad_norm": 0.19573487567653272, |
| "learning_rate": 8.713136729222521e-07, |
| "loss": 0.2934, |
| "step": 2448 |
| }, |
| { |
| "epoch": 2.95416164053076, |
| "grad_norm": 0.19156764509284374, |
| "learning_rate": 8.489722966934764e-07, |
| "loss": 0.2672, |
| "step": 2449 |
| }, |
| { |
| "epoch": 2.9553679131483714, |
| "grad_norm": 0.19620055140220727, |
| "learning_rate": 8.266309204647006e-07, |
| "loss": 0.2925, |
| "step": 2450 |
| }, |
| { |
| "epoch": 2.956574185765983, |
| "grad_norm": 0.20150136995738124, |
| "learning_rate": 8.04289544235925e-07, |
| "loss": 0.2885, |
| "step": 2451 |
| }, |
| { |
| "epoch": 2.9577804583835947, |
| "grad_norm": 0.18808650789680123, |
| "learning_rate": 7.819481680071492e-07, |
| "loss": 0.2877, |
| "step": 2452 |
| }, |
| { |
| "epoch": 2.958986731001206, |
| "grad_norm": 0.20237101627697343, |
| "learning_rate": 7.596067917783736e-07, |
| "loss": 0.295, |
| "step": 2453 |
| }, |
| { |
| "epoch": 2.960193003618818, |
| "grad_norm": 0.1902312694516005, |
| "learning_rate": 7.372654155495979e-07, |
| "loss": 0.2761, |
| "step": 2454 |
| }, |
| { |
| "epoch": 2.9613992762364294, |
| "grad_norm": 0.1889315314894398, |
| "learning_rate": 7.149240393208222e-07, |
| "loss": 0.2949, |
| "step": 2455 |
| }, |
| { |
| "epoch": 2.9626055488540413, |
| "grad_norm": 0.19561298973269395, |
| "learning_rate": 6.925826630920465e-07, |
| "loss": 0.2881, |
| "step": 2456 |
| }, |
| { |
| "epoch": 2.9638118214716527, |
| "grad_norm": 0.19245025002510016, |
| "learning_rate": 6.702412868632708e-07, |
| "loss": 0.2836, |
| "step": 2457 |
| }, |
| { |
| "epoch": 2.965018094089264, |
| "grad_norm": 0.18665421617528258, |
| "learning_rate": 6.478999106344952e-07, |
| "loss": 0.2767, |
| "step": 2458 |
| }, |
| { |
| "epoch": 2.966224366706876, |
| "grad_norm": 0.21266300683887, |
| "learning_rate": 6.255585344057195e-07, |
| "loss": 0.2934, |
| "step": 2459 |
| }, |
| { |
| "epoch": 2.9674306393244874, |
| "grad_norm": 0.1933172786462093, |
| "learning_rate": 6.032171581769437e-07, |
| "loss": 0.2963, |
| "step": 2460 |
| }, |
| { |
| "epoch": 2.9686369119420988, |
| "grad_norm": 0.18850035175693095, |
| "learning_rate": 5.80875781948168e-07, |
| "loss": 0.2873, |
| "step": 2461 |
| }, |
| { |
| "epoch": 2.9698431845597106, |
| "grad_norm": 0.20078041695018908, |
| "learning_rate": 5.585344057193923e-07, |
| "loss": 0.2838, |
| "step": 2462 |
| }, |
| { |
| "epoch": 2.971049457177322, |
| "grad_norm": 0.21181100144588538, |
| "learning_rate": 5.361930294906167e-07, |
| "loss": 0.2913, |
| "step": 2463 |
| }, |
| { |
| "epoch": 2.9722557297949335, |
| "grad_norm": 0.207677897790048, |
| "learning_rate": 5.13851653261841e-07, |
| "loss": 0.2878, |
| "step": 2464 |
| }, |
| { |
| "epoch": 2.9734620024125453, |
| "grad_norm": 0.19564831186151077, |
| "learning_rate": 4.915102770330653e-07, |
| "loss": 0.2827, |
| "step": 2465 |
| }, |
| { |
| "epoch": 2.9746682750301567, |
| "grad_norm": 0.17970354493070714, |
| "learning_rate": 4.6916890080428954e-07, |
| "loss": 0.2792, |
| "step": 2466 |
| }, |
| { |
| "epoch": 2.975874547647768, |
| "grad_norm": 0.18873251167142202, |
| "learning_rate": 4.4682752457551384e-07, |
| "loss": 0.278, |
| "step": 2467 |
| }, |
| { |
| "epoch": 2.97708082026538, |
| "grad_norm": 0.18314280872829297, |
| "learning_rate": 4.244861483467382e-07, |
| "loss": 0.2812, |
| "step": 2468 |
| }, |
| { |
| "epoch": 2.9782870928829914, |
| "grad_norm": 0.1922397965492106, |
| "learning_rate": 4.021447721179625e-07, |
| "loss": 0.3008, |
| "step": 2469 |
| }, |
| { |
| "epoch": 2.9794933655006033, |
| "grad_norm": 0.20322955656849276, |
| "learning_rate": 3.798033958891868e-07, |
| "loss": 0.2938, |
| "step": 2470 |
| }, |
| { |
| "epoch": 2.9806996381182147, |
| "grad_norm": 0.18912287402642547, |
| "learning_rate": 3.574620196604111e-07, |
| "loss": 0.2856, |
| "step": 2471 |
| }, |
| { |
| "epoch": 2.9819059107358266, |
| "grad_norm": 0.18348812116265312, |
| "learning_rate": 3.351206434316354e-07, |
| "loss": 0.2734, |
| "step": 2472 |
| }, |
| { |
| "epoch": 2.983112183353438, |
| "grad_norm": 0.20205824365979516, |
| "learning_rate": 3.1277926720285975e-07, |
| "loss": 0.2856, |
| "step": 2473 |
| }, |
| { |
| "epoch": 2.9843184559710494, |
| "grad_norm": 0.18061090101631558, |
| "learning_rate": 2.90437890974084e-07, |
| "loss": 0.2889, |
| "step": 2474 |
| }, |
| { |
| "epoch": 2.9855247285886612, |
| "grad_norm": 0.19600190278992066, |
| "learning_rate": 2.6809651474530835e-07, |
| "loss": 0.2954, |
| "step": 2475 |
| }, |
| { |
| "epoch": 2.9867310012062727, |
| "grad_norm": 0.1851582944696054, |
| "learning_rate": 2.4575513851653265e-07, |
| "loss": 0.2702, |
| "step": 2476 |
| }, |
| { |
| "epoch": 2.987937273823884, |
| "grad_norm": 0.19177764461246363, |
| "learning_rate": 2.2341376228775692e-07, |
| "loss": 0.2818, |
| "step": 2477 |
| }, |
| { |
| "epoch": 2.989143546441496, |
| "grad_norm": 0.18832137389477646, |
| "learning_rate": 2.0107238605898125e-07, |
| "loss": 0.2744, |
| "step": 2478 |
| }, |
| { |
| "epoch": 2.9903498190591074, |
| "grad_norm": 0.1934068643268722, |
| "learning_rate": 1.7873100983020555e-07, |
| "loss": 0.2964, |
| "step": 2479 |
| }, |
| { |
| "epoch": 2.9915560916767188, |
| "grad_norm": 0.19723930708026718, |
| "learning_rate": 1.5638963360142987e-07, |
| "loss": 0.2963, |
| "step": 2480 |
| }, |
| { |
| "epoch": 2.9927623642943306, |
| "grad_norm": 0.18748029021039345, |
| "learning_rate": 1.3404825737265417e-07, |
| "loss": 0.2819, |
| "step": 2481 |
| }, |
| { |
| "epoch": 2.993968636911942, |
| "grad_norm": 0.19792207901814557, |
| "learning_rate": 1.1170688114387846e-07, |
| "loss": 0.2997, |
| "step": 2482 |
| }, |
| { |
| "epoch": 2.9951749095295535, |
| "grad_norm": 0.18747972922352799, |
| "learning_rate": 8.936550491510277e-08, |
| "loss": 0.2742, |
| "step": 2483 |
| }, |
| { |
| "epoch": 2.9963811821471653, |
| "grad_norm": 0.19061815955899578, |
| "learning_rate": 6.702412868632709e-08, |
| "loss": 0.2833, |
| "step": 2484 |
| }, |
| { |
| "epoch": 2.9975874547647767, |
| "grad_norm": 0.18986370796667845, |
| "learning_rate": 4.4682752457551387e-08, |
| "loss": 0.2966, |
| "step": 2485 |
| }, |
| { |
| "epoch": 2.9987937273823886, |
| "grad_norm": 0.1746287102520941, |
| "learning_rate": 2.2341376228775693e-08, |
| "loss": 0.2758, |
| "step": 2486 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.189086510210081, |
| "learning_rate": 0.0, |
| "loss": 0.2636, |
| "step": 2487 |
| }, |
| { |
| "epoch": 3.0, |
| "step": 2487, |
| "total_flos": 2.1271624359105004e+18, |
| "train_loss": 0.4341153975203184, |
| "train_runtime": 146020.7654, |
| "train_samples_per_second": 0.272, |
| "train_steps_per_second": 0.017 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 2487, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 3, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 2.1271624359105004e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
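
The structure above matches a Hugging Face `Trainer` state file (`trainer_state.json`): per-step entries in `log_history`, one final summary row (`train_loss`, `train_runtime`, …), and top-level run metadata (`max_steps`, `total_flos`, `train_batch_size`). A minimal sketch, assuming the JSON is saved locally as `trainer_state.json` (the filename, the stdlib-only approach, and the 50-step window are assumptions for illustration, not part of the log), for loading it and summarizing the run:

```python
import json
from statistics import mean

# Load the trainer state shown above (the path is an assumed example).
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step rows carry "loss"; the single summary row carries "train_*" fields instead.
step_rows = [row for row in state["log_history"] if "loss" in row]
summary = next(row for row in state["log_history"] if "train_runtime" in row)

print(f"steps logged:       {len(step_rows)} / {state['max_steps']}")
print(f"final step loss:    {step_rows[-1]['loss']:.4f}")
print(f"mean of last 50:    {mean(r['loss'] for r in step_rows[-50:]):.4f}")
print(f"overall train_loss: {summary['train_loss']:.4f}")
print(f"runtime (hours):    {summary['train_runtime'] / 3600:.1f}")
```

On the values logged here this would report a final-step loss of 0.2636 and an overall `train_loss` of about 0.434, with the learning rate annealed to 0.0 at step 2487 (epoch 3.0), i.e. the schedule ran to completion rather than stopping early.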
|
|