| { |
| "best_global_step": null, |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 0.01934362475578674, |
| "eval_steps": 500, |
| "global_step": 21000, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 9.21124988370797e-06, |
| "grad_norm": 24543.888671875, |
| "learning_rate": 2e-09, |
| "loss": 220.5426, |
| "step": 10 |
| }, |
| { |
| "epoch": 1.842249976741594e-05, |
| "grad_norm": 10241.0400390625, |
| "learning_rate": 4e-09, |
| "loss": 220.1758, |
| "step": 20 |
| }, |
| { |
| "epoch": 2.763374965112391e-05, |
| "grad_norm": 10399.990234375, |
| "learning_rate": 5.999999999999999e-09, |
| "loss": 220.2205, |
| "step": 30 |
| }, |
| { |
| "epoch": 3.684499953483188e-05, |
| "grad_norm": 13848.333984375, |
| "learning_rate": 8e-09, |
| "loss": 220.3072, |
| "step": 40 |
| }, |
| { |
| "epoch": 4.605624941853985e-05, |
| "grad_norm": 9315.6455078125, |
| "learning_rate": 1e-08, |
| "loss": 221.6146, |
| "step": 50 |
| }, |
| { |
| "epoch": 5.526749930224782e-05, |
| "grad_norm": 8438.7548828125, |
| "learning_rate": 1.1999999999999998e-08, |
| "loss": 220.226, |
| "step": 60 |
| }, |
| { |
| "epoch": 6.447874918595579e-05, |
| "grad_norm": 3931.7529296875, |
| "learning_rate": 1.4000000000000001e-08, |
| "loss": 220.3897, |
| "step": 70 |
| }, |
| { |
| "epoch": 7.368999906966376e-05, |
| "grad_norm": 14291.806640625, |
| "learning_rate": 1.6e-08, |
| "loss": 219.9478, |
| "step": 80 |
| }, |
| { |
| "epoch": 8.290124895337174e-05, |
| "grad_norm": 23687.798828125, |
| "learning_rate": 1.8e-08, |
| "loss": 219.5298, |
| "step": 90 |
| }, |
| { |
| "epoch": 9.21124988370797e-05, |
| "grad_norm": 4884.8232421875, |
| "learning_rate": 2e-08, |
| "loss": 220.2271, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.00010132374872078767, |
| "grad_norm": 4641.21484375, |
| "learning_rate": 2.2e-08, |
| "loss": 219.4331, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.00011053499860449564, |
| "grad_norm": 6408.65966796875, |
| "learning_rate": 2.3999999999999997e-08, |
| "loss": 219.8701, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.00011974624848820361, |
| "grad_norm": 10528.3056640625, |
| "learning_rate": 2.6e-08, |
| "loss": 218.9679, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.00012895749837191158, |
| "grad_norm": 8900.884765625, |
| "learning_rate": 2.8000000000000003e-08, |
| "loss": 219.1065, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.00013816874825561956, |
| "grad_norm": 10524.498046875, |
| "learning_rate": 3e-08, |
| "loss": 219.1305, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.0001473799981393275, |
| "grad_norm": 6538.634765625, |
| "learning_rate": 3.2e-08, |
| "loss": 217.901, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.0001565912480230355, |
| "grad_norm": 10810.71484375, |
| "learning_rate": 3.4e-08, |
| "loss": 217.3209, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.00016580249790674347, |
| "grad_norm": 21237.3671875, |
| "learning_rate": 3.6e-08, |
| "loss": 216.4422, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.00017501374779045143, |
| "grad_norm": 5481.71875, |
| "learning_rate": 3.7999999999999996e-08, |
| "loss": 216.4666, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.0001842249976741594, |
| "grad_norm": 6831.8037109375, |
| "learning_rate": 4e-08, |
| "loss": 216.237, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.00019343624755786736, |
| "grad_norm": 9414.6396484375, |
| "learning_rate": 4.2e-08, |
| "loss": 212.1064, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.00020264749744157534, |
| "grad_norm": 10094.9619140625, |
| "learning_rate": 4.4e-08, |
| "loss": 209.3174, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.00021185874732528333, |
| "grad_norm": 7225.1728515625, |
| "learning_rate": 4.6e-08, |
| "loss": 207.7483, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.00022106999720899128, |
| "grad_norm": 6287.2392578125, |
| "learning_rate": 4.799999999999999e-08, |
| "loss": 206.831, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.00023028124709269926, |
| "grad_norm": 9522.2490234375, |
| "learning_rate": 5e-08, |
| "loss": 206.6122, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.00023949249697640721, |
| "grad_norm": 10925.5576171875, |
| "learning_rate": 5.2e-08, |
| "loss": 205.8413, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.00024870374686011517, |
| "grad_norm": 10365.884765625, |
| "learning_rate": 5.4e-08, |
| "loss": 205.4764, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.00025791499674382315, |
| "grad_norm": 8383.28515625, |
| "learning_rate": 5.6000000000000005e-08, |
| "loss": 204.3877, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.00026712624662753113, |
| "grad_norm": 11649.3759765625, |
| "learning_rate": 5.7999999999999997e-08, |
| "loss": 200.9893, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.0002763374965112391, |
| "grad_norm": 7531.2880859375, |
| "learning_rate": 6e-08, |
| "loss": 194.6252, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.0002855487463949471, |
| "grad_norm": 6068.7568359375, |
| "learning_rate": 6.2e-08, |
| "loss": 189.857, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.000294759996278655, |
| "grad_norm": 5815.9833984375, |
| "learning_rate": 6.4e-08, |
| "loss": 185.5137, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.000303971246162363, |
| "grad_norm": 21107.716796875, |
| "learning_rate": 6.6e-08, |
| "loss": 181.9634, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.000313182496046071, |
| "grad_norm": 6734.845703125, |
| "learning_rate": 6.8e-08, |
| "loss": 179.3172, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.00032239374592977896, |
| "grad_norm": 4339.88330078125, |
| "learning_rate": 6.999999999999999e-08, |
| "loss": 175.8778, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.00033160499581348695, |
| "grad_norm": 4096.224609375, |
| "learning_rate": 7.2e-08, |
| "loss": 170.4864, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.0003408162456971949, |
| "grad_norm": 6760.9658203125, |
| "learning_rate": 7.4e-08, |
| "loss": 163.9281, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.00035002749558090285, |
| "grad_norm": 14649.873046875, |
| "learning_rate": 7.599999999999999e-08, |
| "loss": 155.3414, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.00035923874546461084, |
| "grad_norm": 2495.66015625, |
| "learning_rate": 7.8e-08, |
| "loss": 146.2131, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.0003684499953483188, |
| "grad_norm": 8519.6259765625, |
| "learning_rate": 8e-08, |
| "loss": 138.1042, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.0003776612452320268, |
| "grad_norm": 1903.248046875, |
| "learning_rate": 8.199999999999999e-08, |
| "loss": 131.0896, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.0003868724951157347, |
| "grad_norm": 9463.728515625, |
| "learning_rate": 8.4e-08, |
| "loss": 125.0731, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.0003960837449994427, |
| "grad_norm": 1609.21142578125, |
| "learning_rate": 8.599999999999999e-08, |
| "loss": 120.3414, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.0004052949948831507, |
| "grad_norm": 2755.753173828125, |
| "learning_rate": 8.8e-08, |
| "loss": 116.4982, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.00041450624476685867, |
| "grad_norm": 1960.21826171875, |
| "learning_rate": 9e-08, |
| "loss": 112.9642, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.00042371749465056665, |
| "grad_norm": 1926.123779296875, |
| "learning_rate": 9.2e-08, |
| "loss": 110.3594, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.0004329287445342746, |
| "grad_norm": 2423.68701171875, |
| "learning_rate": 9.4e-08, |
| "loss": 107.1804, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.00044213999441798256, |
| "grad_norm": 1817.8050537109375, |
| "learning_rate": 9.599999999999999e-08, |
| "loss": 104.2375, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.00045135124430169054, |
| "grad_norm": 2031.8873291015625, |
| "learning_rate": 9.799999999999999e-08, |
| "loss": 101.777, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.0004605624941853985, |
| "grad_norm": 2006.6820068359375, |
| "learning_rate": 1e-07, |
| "loss": 99.1171, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.0004697737440691065, |
| "grad_norm": 2212.011474609375, |
| "learning_rate": 1.02e-07, |
| "loss": 96.3989, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.00047898499395281443, |
| "grad_norm": 2256.823974609375, |
| "learning_rate": 1.04e-07, |
| "loss": 94.1581, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.0004881962438365224, |
| "grad_norm": 2405.353515625, |
| "learning_rate": 1.06e-07, |
| "loss": 91.9177, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.0004974074937202303, |
| "grad_norm": 2485.800537109375, |
| "learning_rate": 1.08e-07, |
| "loss": 90.2174, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.0005066187436039383, |
| "grad_norm": 2490.753173828125, |
| "learning_rate": 1.1e-07, |
| "loss": 88.5933, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.0005158299934876463, |
| "grad_norm": 2577.428955078125, |
| "learning_rate": 1.1200000000000001e-07, |
| "loss": 86.8703, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.0005250412433713543, |
| "grad_norm": 2606.63330078125, |
| "learning_rate": 1.1399999999999999e-07, |
| "loss": 85.5399, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.0005342524932550623, |
| "grad_norm": 2648.078369140625, |
| "learning_rate": 1.1599999999999999e-07, |
| "loss": 83.9069, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.0005434637431387702, |
| "grad_norm": 2611.26904296875, |
| "learning_rate": 1.1799999999999998e-07, |
| "loss": 82.1513, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.0005526749930224782, |
| "grad_norm": 2653.947021484375, |
| "learning_rate": 1.2e-07, |
| "loss": 81.2791, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.0005618862429061862, |
| "grad_norm": 2618.644287109375, |
| "learning_rate": 1.2199999999999998e-07, |
| "loss": 79.6853, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.0005710974927898942, |
| "grad_norm": 2638.73095703125, |
| "learning_rate": 1.24e-07, |
| "loss": 78.8529, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.0005803087426736022, |
| "grad_norm": 2632.6005859375, |
| "learning_rate": 1.26e-07, |
| "loss": 77.7486, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.00058951999255731, |
| "grad_norm": 2638.947265625, |
| "learning_rate": 1.28e-07, |
| "loss": 76.6229, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.000598731242441018, |
| "grad_norm": 2649.839111328125, |
| "learning_rate": 1.3e-07, |
| "loss": 75.9808, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.000607942492324726, |
| "grad_norm": 2665.275390625, |
| "learning_rate": 1.32e-07, |
| "loss": 75.2043, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.000617153742208434, |
| "grad_norm": 2643.500244140625, |
| "learning_rate": 1.34e-07, |
| "loss": 74.1522, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.000626364992092142, |
| "grad_norm": 2625.662109375, |
| "learning_rate": 1.36e-07, |
| "loss": 73.2423, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.00063557624197585, |
| "grad_norm": 2624.3466796875, |
| "learning_rate": 1.38e-07, |
| "loss": 72.314, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.0006447874918595579, |
| "grad_norm": 2613.97265625, |
| "learning_rate": 1.3999999999999998e-07, |
| "loss": 71.3904, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.0006539987417432659, |
| "grad_norm": 2650.936767578125, |
| "learning_rate": 1.4199999999999997e-07, |
| "loss": 70.7865, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.0006632099916269739, |
| "grad_norm": 2637.9765625, |
| "learning_rate": 1.44e-07, |
| "loss": 70.0417, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.0006724212415106819, |
| "grad_norm": 2641.185302734375, |
| "learning_rate": 1.4599999999999998e-07, |
| "loss": 69.3083, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.0006816324913943897, |
| "grad_norm": 2681.737548828125, |
| "learning_rate": 1.48e-07, |
| "loss": 68.6363, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.0006908437412780977, |
| "grad_norm": 2625.10546875, |
| "learning_rate": 1.5e-07, |
| "loss": 67.5002, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.0007000549911618057, |
| "grad_norm": 2633.7431640625, |
| "learning_rate": 1.5199999999999998e-07, |
| "loss": 66.9992, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.0007092662410455137, |
| "grad_norm": 2671.560302734375, |
| "learning_rate": 1.54e-07, |
| "loss": 66.151, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.0007184774909292217, |
| "grad_norm": 2655.801025390625, |
| "learning_rate": 1.56e-07, |
| "loss": 65.2985, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.0007276887408129297, |
| "grad_norm": 2656.512451171875, |
| "learning_rate": 1.58e-07, |
| "loss": 64.118, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.0007368999906966376, |
| "grad_norm": 2651.82373046875, |
| "learning_rate": 1.6e-07, |
| "loss": 63.7059, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.0007461112405803456, |
| "grad_norm": 2656.808837890625, |
| "learning_rate": 1.62e-07, |
| "loss": 62.8785, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.0007553224904640536, |
| "grad_norm": 2620.306640625, |
| "learning_rate": 1.6399999999999999e-07, |
| "loss": 62.1854, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.0007645337403477616, |
| "grad_norm": 2649.009033203125, |
| "learning_rate": 1.6599999999999998e-07, |
| "loss": 61.4117, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.0007737449902314695, |
| "grad_norm": 2691.31201171875, |
| "learning_rate": 1.68e-07, |
| "loss": 60.871, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.0007829562401151774, |
| "grad_norm": 2679.378173828125, |
| "learning_rate": 1.7e-07, |
| "loss": 59.8941, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.0007921674899988854, |
| "grad_norm": 2690.009765625, |
| "learning_rate": 1.7199999999999998e-07, |
| "loss": 58.9565, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.0008013787398825934, |
| "grad_norm": 2680.283447265625, |
| "learning_rate": 1.74e-07, |
| "loss": 58.4268, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.0008105899897663014, |
| "grad_norm": 2683.234619140625, |
| "learning_rate": 1.76e-07, |
| "loss": 57.5642, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.0008198012396500094, |
| "grad_norm": 2687.000732421875, |
| "learning_rate": 1.78e-07, |
| "loss": 56.8452, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.0008290124895337173, |
| "grad_norm": 2667.076904296875, |
| "learning_rate": 1.8e-07, |
| "loss": 55.9111, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.0008382237394174253, |
| "grad_norm": 2659.440185546875, |
| "learning_rate": 1.82e-07, |
| "loss": 55.1071, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.0008474349893011333, |
| "grad_norm": 2668.349853515625, |
| "learning_rate": 1.84e-07, |
| "loss": 54.3913, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.0008566462391848413, |
| "grad_norm": 2719.93603515625, |
| "learning_rate": 1.86e-07, |
| "loss": 53.4342, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.0008658574890685492, |
| "grad_norm": 2650.142822265625, |
| "learning_rate": 1.88e-07, |
| "loss": 52.6626, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.0008750687389522571, |
| "grad_norm": 2705.26123046875, |
| "learning_rate": 1.8999999999999998e-07, |
| "loss": 51.7493, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.0008842799888359651, |
| "grad_norm": 2648.907470703125, |
| "learning_rate": 1.9199999999999997e-07, |
| "loss": 50.7257, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.0008934912387196731, |
| "grad_norm": 2708.065185546875, |
| "learning_rate": 1.94e-07, |
| "loss": 50.0915, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.0009027024886033811, |
| "grad_norm": 2660.25439453125, |
| "learning_rate": 1.9599999999999998e-07, |
| "loss": 48.808, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.0009119137384870891, |
| "grad_norm": 2662.50927734375, |
| "learning_rate": 1.98e-07, |
| "loss": 47.8852, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.000921124988370797, |
| "grad_norm": 2707.00341796875, |
| "learning_rate": 2e-07, |
| "loss": 47.2123, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.000930336238254505, |
| "grad_norm": 2675.22216796875, |
| "learning_rate": 1.9999999998952275e-07, |
| "loss": 46.0752, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.000939547488138213, |
| "grad_norm": 2697.236083984375, |
| "learning_rate": 1.9999999995809102e-07, |
| "loss": 45.2946, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.000948758738021921, |
| "grad_norm": 2703.52490234375, |
| "learning_rate": 1.999999999057048e-07, |
| "loss": 44.6116, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.0009579699879056289, |
| "grad_norm": 2686.092529296875, |
| "learning_rate": 1.999999998323641e-07, |
| "loss": 43.5896, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.0009671812377893368, |
| "grad_norm": 2677.6640625, |
| "learning_rate": 1.9999999973806892e-07, |
| "loss": 42.8424, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.0009763924876730448, |
| "grad_norm": 2650.331298828125, |
| "learning_rate": 1.9999999962281924e-07, |
| "loss": 42.0022, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.000985603737556753, |
| "grad_norm": 2642.42626953125, |
| "learning_rate": 1.9999999948661507e-07, |
| "loss": 41.0716, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.0009948149874404607, |
| "grad_norm": 2689.304931640625, |
| "learning_rate": 1.9999999932945644e-07, |
| "loss": 40.4466, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.0010040262373241687, |
| "grad_norm": 2669.737548828125, |
| "learning_rate": 1.9999999915134328e-07, |
| "loss": 39.5445, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.0010132374872078766, |
| "grad_norm": 2649.470703125, |
| "learning_rate": 1.999999989522757e-07, |
| "loss": 38.6263, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.0010224487370915846, |
| "grad_norm": 2627.875732421875, |
| "learning_rate": 1.999999987322536e-07, |
| "loss": 37.7466, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.0010316599869752926, |
| "grad_norm": 2618.5498046875, |
| "learning_rate": 1.99999998491277e-07, |
| "loss": 36.9214, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.0010408712368590006, |
| "grad_norm": 2646.177734375, |
| "learning_rate": 1.9999999822934593e-07, |
| "loss": 36.1607, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.0010500824867427086, |
| "grad_norm": 2627.651123046875, |
| "learning_rate": 1.9999999794646035e-07, |
| "loss": 35.3319, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.0010592937366264165, |
| "grad_norm": 2612.81591796875, |
| "learning_rate": 1.9999999764262032e-07, |
| "loss": 34.4908, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.0010685049865101245, |
| "grad_norm": 2596.915283203125, |
| "learning_rate": 1.9999999731782577e-07, |
| "loss": 33.6653, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.0010777162363938325, |
| "grad_norm": 2546.65380859375, |
| "learning_rate": 1.9999999697207677e-07, |
| "loss": 32.8168, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.0010869274862775405, |
| "grad_norm": 2579.26953125, |
| "learning_rate": 1.9999999660537329e-07, |
| "loss": 32.1208, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.0010961387361612485, |
| "grad_norm": 2549.15576171875, |
| "learning_rate": 1.999999962177153e-07, |
| "loss": 31.1698, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.0011053499860449565, |
| "grad_norm": 2564.56103515625, |
| "learning_rate": 1.999999958091028e-07, |
| "loss": 30.4605, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.0011145612359286644, |
| "grad_norm": 2582.181884765625, |
| "learning_rate": 1.9999999537953585e-07, |
| "loss": 29.7207, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.0011237724858123724, |
| "grad_norm": 2491.742919921875, |
| "learning_rate": 1.999999949290144e-07, |
| "loss": 28.9608, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.0011329837356960804, |
| "grad_norm": 2497.609130859375, |
| "learning_rate": 1.9999999445753847e-07, |
| "loss": 28.1312, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.0011421949855797884, |
| "grad_norm": 2473.400390625, |
| "learning_rate": 1.9999999396510806e-07, |
| "loss": 27.3551, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.0011514062354634964, |
| "grad_norm": 2454.843994140625, |
| "learning_rate": 1.999999934517232e-07, |
| "loss": 26.6391, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.0011606174853472043, |
| "grad_norm": 2423.966552734375, |
| "learning_rate": 1.9999999291738379e-07, |
| "loss": 26.082, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.0011698287352309123, |
| "grad_norm": 2410.223876953125, |
| "learning_rate": 1.999999923620899e-07, |
| "loss": 25.2836, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.00117903998511462, |
| "grad_norm": 2365.123291015625, |
| "learning_rate": 1.9999999178584158e-07, |
| "loss": 24.4937, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.001188251234998328, |
| "grad_norm": 2398.742919921875, |
| "learning_rate": 1.9999999118863872e-07, |
| "loss": 23.9224, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.001197462484882036, |
| "grad_norm": 2378.7412109375, |
| "learning_rate": 1.999999905704814e-07, |
| "loss": 23.3241, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.001206673734765744, |
| "grad_norm": 2325.090087890625, |
| "learning_rate": 1.999999899313696e-07, |
| "loss": 22.6005, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.001215884984649452, |
| "grad_norm": 2326.560302734375, |
| "learning_rate": 1.999999892713033e-07, |
| "loss": 22.0256, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.00122509623453316, |
| "grad_norm": 2276.50048828125, |
| "learning_rate": 1.9999998859028254e-07, |
| "loss": 21.3044, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.001234307484416868, |
| "grad_norm": 2233.469482421875, |
| "learning_rate": 1.9999998788830727e-07, |
| "loss": 20.6893, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.001243518734300576, |
| "grad_norm": 2200.521484375, |
| "learning_rate": 1.9999998716537757e-07, |
| "loss": 19.9953, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.001252729984184284, |
| "grad_norm": 2163.321533203125, |
| "learning_rate": 1.9999998642149333e-07, |
| "loss": 19.365, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.001261941234067992, |
| "grad_norm": 2125.6787109375, |
| "learning_rate": 1.999999856566546e-07, |
| "loss": 18.7637, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.0012711524839517, |
| "grad_norm": 2124.439697265625, |
| "learning_rate": 1.9999998487086143e-07, |
| "loss": 18.2988, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.0012803637338354079, |
| "grad_norm": 2077.859619140625, |
| "learning_rate": 1.9999998406411376e-07, |
| "loss": 17.7236, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.0012895749837191159, |
| "grad_norm": 2045.779541015625, |
| "learning_rate": 1.9999998323641162e-07, |
| "loss": 17.0449, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.0012987862336028238, |
| "grad_norm": 2026.2315673828125, |
| "learning_rate": 1.9999998238775496e-07, |
| "loss": 16.4903, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.0013079974834865318, |
| "grad_norm": 1960.8985595703125, |
| "learning_rate": 1.9999998151814385e-07, |
| "loss": 15.9423, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.0013172087333702398, |
| "grad_norm": 1901.08203125, |
| "learning_rate": 1.9999998062757822e-07, |
| "loss": 15.3386, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.0013264199832539478, |
| "grad_norm": 1864.98583984375, |
| "learning_rate": 1.9999997971605817e-07, |
| "loss": 14.9109, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.0013356312331376558, |
| "grad_norm": 1840.9862060546875, |
| "learning_rate": 1.9999997878358358e-07, |
| "loss": 14.4373, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.0013448424830213637, |
| "grad_norm": 1768.5701904296875, |
| "learning_rate": 1.9999997783015453e-07, |
| "loss": 13.9459, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.0013540537329050717, |
| "grad_norm": 1757.9100341796875, |
| "learning_rate": 1.99999976855771e-07, |
| "loss": 13.4011, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.0013632649827887795, |
| "grad_norm": 1714.19384765625, |
| "learning_rate": 1.9999997586043301e-07, |
| "loss": 12.9283, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.0013724762326724875, |
| "grad_norm": 1660.2532958984375, |
| "learning_rate": 1.9999997484414052e-07, |
| "loss": 12.5959, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.0013816874825561955, |
| "grad_norm": 1613.7947998046875, |
| "learning_rate": 1.9999997380689356e-07, |
| "loss": 12.1076, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.0013908987324399034, |
| "grad_norm": 1570.248046875, |
| "learning_rate": 1.999999727486921e-07, |
| "loss": 11.6747, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.0014001099823236114, |
| "grad_norm": 1518.6502685546875, |
| "learning_rate": 1.9999997166953615e-07, |
| "loss": 11.2915, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.0014093212322073194, |
| "grad_norm": 1464.673095703125, |
| "learning_rate": 1.9999997056942575e-07, |
| "loss": 10.785, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.0014185324820910274, |
| "grad_norm": 1432.284912109375, |
| "learning_rate": 1.9999996944836086e-07, |
| "loss": 10.4627, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.0014277437319747354, |
| "grad_norm": 1364.3857421875, |
| "learning_rate": 1.999999683063415e-07, |
| "loss": 10.0882, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.0014369549818584433, |
| "grad_norm": 1332.6484375, |
| "learning_rate": 1.9999996714336764e-07, |
| "loss": 9.6036, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.0014461662317421513, |
| "grad_norm": 1285.5634765625, |
| "learning_rate": 1.999999659594393e-07, |
| "loss": 9.3887, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.0014553774816258593, |
| "grad_norm": 1228.310546875, |
| "learning_rate": 1.999999647545565e-07, |
| "loss": 8.9631, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.0014645887315095673, |
| "grad_norm": 1193.911865234375, |
| "learning_rate": 1.999999635287192e-07, |
| "loss": 8.6061, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.0014737999813932753, |
| "grad_norm": 1133.885986328125, |
| "learning_rate": 1.9999996228192744e-07, |
| "loss": 8.3859, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.0014830112312769832, |
| "grad_norm": 1095.484130859375, |
| "learning_rate": 1.999999610141812e-07, |
| "loss": 8.1791, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.0014922224811606912, |
| "grad_norm": 1044.8935546875, |
| "learning_rate": 1.9999995972548046e-07, |
| "loss": 7.7655, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.0015014337310443992, |
| "grad_norm": 1019.5875854492188, |
| "learning_rate": 1.9999995841582525e-07, |
| "loss": 7.5381, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.0015106449809281072, |
| "grad_norm": 985.502197265625, |
| "learning_rate": 1.999999570852156e-07, |
| "loss": 7.2579, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.0015198562308118152, |
| "grad_norm": 925.2315673828125, |
| "learning_rate": 1.9999995573365146e-07, |
| "loss": 7.0491, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.0015290674806955232, |
| "grad_norm": 888.581298828125, |
| "learning_rate": 1.9999995436113282e-07, |
| "loss": 6.7646, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.001538278730579231, |
| "grad_norm": 858.232421875, |
| "learning_rate": 1.999999529676597e-07, |
| "loss": 6.5387, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.001547489980462939, |
| "grad_norm": 814.1123046875, |
| "learning_rate": 1.999999515532321e-07, |
| "loss": 6.2748, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.0015567012303466469, |
| "grad_norm": 772.8284912109375, |
| "learning_rate": 1.9999995011785004e-07, |
| "loss": 6.1093, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.0015659124802303549, |
| "grad_norm": 759.8836059570312, |
| "learning_rate": 1.999999486615135e-07, |
| "loss": 5.7886, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.0015751237301140628, |
| "grad_norm": 704.4788208007812, |
| "learning_rate": 1.999999471842225e-07, |
| "loss": 5.6172, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.0015843349799977708, |
| "grad_norm": 684.6796264648438, |
| "learning_rate": 1.9999994568597702e-07, |
| "loss": 5.4777, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.0015935462298814788, |
| "grad_norm": 653.7792358398438, |
| "learning_rate": 1.9999994416677705e-07, |
| "loss": 5.2928, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.0016027574797651868, |
| "grad_norm": 609.8298950195312, |
| "learning_rate": 1.999999426266226e-07, |
| "loss": 5.2036, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.0016119687296488948, |
| "grad_norm": 592.4334106445312, |
| "learning_rate": 1.9999994106551373e-07, |
| "loss": 4.9767, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.0016211799795326028, |
| "grad_norm": 561.9069213867188, |
| "learning_rate": 1.999999394834503e-07, |
| "loss": 4.8632, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.0016303912294163107, |
| "grad_norm": 536.3654174804688, |
| "learning_rate": 1.9999993788043246e-07, |
| "loss": 4.6632, |
| "step": 1770 |
| }, |
| { |
| "epoch": 0.0016396024793000187, |
| "grad_norm": 505.2630615234375, |
| "learning_rate": 1.9999993625646013e-07, |
| "loss": 4.5513, |
| "step": 1780 |
| }, |
| { |
| "epoch": 0.0016488137291837267, |
| "grad_norm": 480.9029541015625, |
| "learning_rate": 1.9999993461153332e-07, |
| "loss": 4.4474, |
| "step": 1790 |
| }, |
| { |
| "epoch": 0.0016580249790674347, |
| "grad_norm": 450.44378662109375, |
| "learning_rate": 1.9999993294565205e-07, |
| "loss": 4.2804, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.0016672362289511427, |
| "grad_norm": 436.21734619140625, |
| "learning_rate": 1.9999993125881632e-07, |
| "loss": 4.1318, |
| "step": 1810 |
| }, |
| { |
| "epoch": 0.0016764474788348506, |
| "grad_norm": 412.7518310546875, |
| "learning_rate": 1.9999992955102609e-07, |
| "loss": 4.1223, |
| "step": 1820 |
| }, |
| { |
| "epoch": 0.0016856587287185586, |
| "grad_norm": 383.36016845703125, |
| "learning_rate": 1.999999278222814e-07, |
| "loss": 3.9049, |
| "step": 1830 |
| }, |
| { |
| "epoch": 0.0016948699786022666, |
| "grad_norm": 370.7187194824219, |
| "learning_rate": 1.9999992607258224e-07, |
| "loss": 3.8424, |
| "step": 1840 |
| }, |
| { |
| "epoch": 0.0017040812284859746, |
| "grad_norm": 350.1647033691406, |
| "learning_rate": 1.9999992430192863e-07, |
| "loss": 3.7794, |
| "step": 1850 |
| }, |
| { |
| "epoch": 0.0017132924783696826, |
| "grad_norm": 330.06488037109375, |
| "learning_rate": 1.9999992251032052e-07, |
| "loss": 3.5764, |
| "step": 1860 |
| }, |
| { |
| "epoch": 0.0017225037282533903, |
| "grad_norm": 316.21856689453125, |
| "learning_rate": 1.9999992069775794e-07, |
| "loss": 3.5914, |
| "step": 1870 |
| }, |
| { |
| "epoch": 0.0017317149781370983, |
| "grad_norm": 304.57379150390625, |
| "learning_rate": 1.999999188642409e-07, |
| "loss": 3.4422, |
| "step": 1880 |
| }, |
| { |
| "epoch": 0.0017409262280208063, |
| "grad_norm": 279.77081298828125, |
| "learning_rate": 1.999999170097694e-07, |
| "loss": 3.3374, |
| "step": 1890 |
| }, |
| { |
| "epoch": 0.0017501374779045143, |
| "grad_norm": 266.1014099121094, |
| "learning_rate": 1.999999151343434e-07, |
| "loss": 3.2769, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.0017593487277882223, |
| "grad_norm": 260.19317626953125, |
| "learning_rate": 1.9999991323796297e-07, |
| "loss": 3.2044, |
| "step": 1910 |
| }, |
| { |
| "epoch": 0.0017685599776719302, |
| "grad_norm": 235.43695068359375, |
| "learning_rate": 1.9999991132062806e-07, |
| "loss": 3.238, |
| "step": 1920 |
| }, |
| { |
| "epoch": 0.0017777712275556382, |
| "grad_norm": 225.56936645507812, |
| "learning_rate": 1.9999990938233866e-07, |
| "loss": 3.1216, |
| "step": 1930 |
| }, |
| { |
| "epoch": 0.0017869824774393462, |
| "grad_norm": 212.62403869628906, |
| "learning_rate": 1.9999990742309483e-07, |
| "loss": 3.0801, |
| "step": 1940 |
| }, |
| { |
| "epoch": 0.0017961937273230542, |
| "grad_norm": 204.54310607910156, |
| "learning_rate": 1.999999054428965e-07, |
| "loss": 2.9576, |
| "step": 1950 |
| }, |
| { |
| "epoch": 0.0018054049772067622, |
| "grad_norm": 192.5968475341797, |
| "learning_rate": 1.999999034417437e-07, |
| "loss": 2.8603, |
| "step": 1960 |
| }, |
| { |
| "epoch": 0.0018146162270904701, |
| "grad_norm": 179.8728790283203, |
| "learning_rate": 1.9999990141963648e-07, |
| "loss": 2.7742, |
| "step": 1970 |
| }, |
| { |
| "epoch": 0.0018238274769741781, |
| "grad_norm": 172.84512329101562, |
| "learning_rate": 1.9999989937657475e-07, |
| "loss": 2.744, |
| "step": 1980 |
| }, |
| { |
| "epoch": 0.001833038726857886, |
| "grad_norm": 163.42417907714844, |
| "learning_rate": 1.9999989731255859e-07, |
| "loss": 2.7684, |
| "step": 1990 |
| }, |
| { |
| "epoch": 0.001842249976741594, |
| "grad_norm": 152.5693817138672, |
| "learning_rate": 1.9999989522758794e-07, |
| "loss": 2.6928, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.001851461226625302, |
| "grad_norm": 147.03887939453125, |
| "learning_rate": 1.9999989312166284e-07, |
| "loss": 2.6041, |
| "step": 2010 |
| }, |
| { |
| "epoch": 0.00186067247650901, |
| "grad_norm": 137.3078155517578, |
| "learning_rate": 1.9999989099478325e-07, |
| "loss": 2.5658, |
| "step": 2020 |
| }, |
| { |
| "epoch": 0.001869883726392718, |
| "grad_norm": 131.5677032470703, |
| "learning_rate": 1.9999988884694923e-07, |
| "loss": 2.5418, |
| "step": 2030 |
| }, |
| { |
| "epoch": 0.001879094976276426, |
| "grad_norm": 129.69793701171875, |
| "learning_rate": 1.9999988667816073e-07, |
| "loss": 2.5778, |
| "step": 2040 |
| }, |
| { |
| "epoch": 0.001888306226160134, |
| "grad_norm": 116.73808288574219, |
| "learning_rate": 1.9999988448841775e-07, |
| "loss": 2.4358, |
| "step": 2050 |
| }, |
| { |
| "epoch": 0.001897517476043842, |
| "grad_norm": 110.0927505493164, |
| "learning_rate": 1.9999988227772033e-07, |
| "loss": 2.3674, |
| "step": 2060 |
| }, |
| { |
| "epoch": 0.0019067287259275497, |
| "grad_norm": 103.76924896240234, |
| "learning_rate": 1.9999988004606846e-07, |
| "loss": 2.3961, |
| "step": 2070 |
| }, |
| { |
| "epoch": 0.0019159399758112577, |
| "grad_norm": 98.94550323486328, |
| "learning_rate": 1.9999987779346213e-07, |
| "loss": 2.2481, |
| "step": 2080 |
| }, |
| { |
| "epoch": 0.0019251512256949657, |
| "grad_norm": 120.63087463378906, |
| "learning_rate": 1.9999987551990132e-07, |
| "loss": 2.401, |
| "step": 2090 |
| }, |
| { |
| "epoch": 0.0019343624755786737, |
| "grad_norm": 89.98589324951172, |
| "learning_rate": 1.9999987322538605e-07, |
| "loss": 2.2563, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.0019435737254623817, |
| "grad_norm": 83.588623046875, |
| "learning_rate": 1.9999987090991632e-07, |
| "loss": 2.2725, |
| "step": 2110 |
| }, |
| { |
| "epoch": 0.0019527849753460896, |
| "grad_norm": 118.56281280517578, |
| "learning_rate": 1.9999986857349214e-07, |
| "loss": 2.2263, |
| "step": 2120 |
| }, |
| { |
| "epoch": 0.0019619962252297976, |
| "grad_norm": 99.30581665039062, |
| "learning_rate": 1.9999986621611348e-07, |
| "loss": 2.1737, |
| "step": 2130 |
| }, |
| { |
| "epoch": 0.001971207475113506, |
| "grad_norm": 71.56329345703125, |
| "learning_rate": 1.999998638377804e-07, |
| "loss": 2.1267, |
| "step": 2140 |
| }, |
| { |
| "epoch": 0.0019804187249972136, |
| "grad_norm": 68.20590209960938, |
| "learning_rate": 1.9999986143849283e-07, |
| "loss": 2.1793, |
| "step": 2150 |
| }, |
| { |
| "epoch": 0.0019896299748809214, |
| "grad_norm": 65.59575653076172, |
| "learning_rate": 1.9999985901825082e-07, |
| "loss": 2.133, |
| "step": 2160 |
| }, |
| { |
| "epoch": 0.0019988412247646296, |
| "grad_norm": 60.48945236206055, |
| "learning_rate": 1.9999985657705435e-07, |
| "loss": 2.1033, |
| "step": 2170 |
| }, |
| { |
| "epoch": 0.0020080524746483373, |
| "grad_norm": 58.71857833862305, |
| "learning_rate": 1.9999985411490342e-07, |
| "loss": 2.1598, |
| "step": 2180 |
| }, |
| { |
| "epoch": 0.0020172637245320455, |
| "grad_norm": 56.29412078857422, |
| "learning_rate": 1.9999985163179804e-07, |
| "loss": 2.0422, |
| "step": 2190 |
| }, |
| { |
| "epoch": 0.0020264749744157533, |
| "grad_norm": 53.31063461303711, |
| "learning_rate": 1.999998491277382e-07, |
| "loss": 2.0283, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.0020356862242994615, |
| "grad_norm": 49.65985870361328, |
| "learning_rate": 1.9999984660272394e-07, |
| "loss": 1.9932, |
| "step": 2210 |
| }, |
| { |
| "epoch": 0.0020448974741831692, |
| "grad_norm": 47.551918029785156, |
| "learning_rate": 1.9999984405675519e-07, |
| "loss": 2.0264, |
| "step": 2220 |
| }, |
| { |
| "epoch": 0.0020541087240668774, |
| "grad_norm": 46.115474700927734, |
| "learning_rate": 1.9999984148983198e-07, |
| "loss": 1.9898, |
| "step": 2230 |
| }, |
| { |
| "epoch": 0.002063319973950585, |
| "grad_norm": 43.81364822387695, |
| "learning_rate": 1.9999983890195434e-07, |
| "loss": 1.9909, |
| "step": 2240 |
| }, |
| { |
| "epoch": 0.0020725312238342934, |
| "grad_norm": 41.941673278808594, |
| "learning_rate": 1.9999983629312221e-07, |
| "loss": 1.9844, |
| "step": 2250 |
| }, |
| { |
| "epoch": 0.002081742473718001, |
| "grad_norm": 39.945003509521484, |
| "learning_rate": 1.999998336633357e-07, |
| "loss": 1.9837, |
| "step": 2260 |
| }, |
| { |
| "epoch": 0.0020909537236017094, |
| "grad_norm": 39.571205139160156, |
| "learning_rate": 1.9999983101259468e-07, |
| "loss": 1.8766, |
| "step": 2270 |
| }, |
| { |
| "epoch": 0.002100164973485417, |
| "grad_norm": 35.92873764038086, |
| "learning_rate": 1.999998283408992e-07, |
| "loss": 1.8292, |
| "step": 2280 |
| }, |
| { |
| "epoch": 0.0021093762233691253, |
| "grad_norm": 35.03543472290039, |
| "learning_rate": 1.9999982564824931e-07, |
| "loss": 1.9074, |
| "step": 2290 |
| }, |
| { |
| "epoch": 0.002118587473252833, |
| "grad_norm": 35.02627944946289, |
| "learning_rate": 1.9999982293464493e-07, |
| "loss": 1.8396, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.0021277987231365413, |
| "grad_norm": 34.62958908081055, |
| "learning_rate": 1.9999982020008615e-07, |
| "loss": 1.8806, |
| "step": 2310 |
| }, |
| { |
| "epoch": 0.002137009973020249, |
| "grad_norm": 32.987667083740234, |
| "learning_rate": 1.9999981744457288e-07, |
| "loss": 1.7604, |
| "step": 2320 |
| }, |
| { |
| "epoch": 0.0021462212229039573, |
| "grad_norm": 35.742733001708984, |
| "learning_rate": 1.9999981466810518e-07, |
| "loss": 1.7845, |
| "step": 2330 |
| }, |
| { |
| "epoch": 0.002155432472787665, |
| "grad_norm": 40.51860427856445, |
| "learning_rate": 1.9999981187068303e-07, |
| "loss": 1.7774, |
| "step": 2340 |
| }, |
| { |
| "epoch": 0.002164643722671373, |
| "grad_norm": 33.92218780517578, |
| "learning_rate": 1.9999980905230641e-07, |
| "loss": 1.7653, |
| "step": 2350 |
| }, |
| { |
| "epoch": 0.002173854972555081, |
| "grad_norm": 31.67229461669922, |
| "learning_rate": 1.999998062129754e-07, |
| "loss": 1.7301, |
| "step": 2360 |
| }, |
| { |
| "epoch": 0.0021830662224387887, |
| "grad_norm": 33.84907913208008, |
| "learning_rate": 1.999998033526899e-07, |
| "loss": 1.7606, |
| "step": 2370 |
| }, |
| { |
| "epoch": 0.002192277472322497, |
| "grad_norm": 31.650222778320312, |
| "learning_rate": 1.9999980047144997e-07, |
| "loss": 1.6891, |
| "step": 2380 |
| }, |
| { |
| "epoch": 0.0022014887222062047, |
| "grad_norm": 29.637920379638672, |
| "learning_rate": 1.999997975692556e-07, |
| "loss": 1.7196, |
| "step": 2390 |
| }, |
| { |
| "epoch": 0.002210699972089913, |
| "grad_norm": 33.537841796875, |
| "learning_rate": 1.9999979464610676e-07, |
| "loss": 1.6802, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.0022199112219736207, |
| "grad_norm": 32.58498001098633, |
| "learning_rate": 1.9999979170200352e-07, |
| "loss": 1.7063, |
| "step": 2410 |
| }, |
| { |
| "epoch": 0.002229122471857329, |
| "grad_norm": 30.860231399536133, |
| "learning_rate": 1.999997887369458e-07, |
| "loss": 1.7429, |
| "step": 2420 |
| }, |
| { |
| "epoch": 0.0022383337217410366, |
| "grad_norm": 29.551433563232422, |
| "learning_rate": 1.9999978575093368e-07, |
| "loss": 1.6311, |
| "step": 2430 |
| }, |
| { |
| "epoch": 0.002247544971624745, |
| "grad_norm": 27.05377769470215, |
| "learning_rate": 1.9999978274396708e-07, |
| "loss": 1.5905, |
| "step": 2440 |
| }, |
| { |
| "epoch": 0.0022567562215084526, |
| "grad_norm": 33.77758026123047, |
| "learning_rate": 1.9999977971604604e-07, |
| "loss": 1.6453, |
| "step": 2450 |
| }, |
| { |
| "epoch": 0.002265967471392161, |
| "grad_norm": 27.157777786254883, |
| "learning_rate": 1.9999977666717056e-07, |
| "loss": 1.6, |
| "step": 2460 |
| }, |
| { |
| "epoch": 0.0022751787212758686, |
| "grad_norm": 27.114534378051758, |
| "learning_rate": 1.9999977359734067e-07, |
| "loss": 1.5974, |
| "step": 2470 |
| }, |
| { |
| "epoch": 0.0022843899711595768, |
| "grad_norm": 39.75688171386719, |
| "learning_rate": 1.9999977050655633e-07, |
| "loss": 1.5736, |
| "step": 2480 |
| }, |
| { |
| "epoch": 0.0022936012210432845, |
| "grad_norm": 158.88650512695312, |
| "learning_rate": 1.9999976739481753e-07, |
| "loss": 1.5226, |
| "step": 2490 |
| }, |
| { |
| "epoch": 0.0023028124709269927, |
| "grad_norm": 33.52450180053711, |
| "learning_rate": 1.9999976426212433e-07, |
| "loss": 1.5865, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.0023120237208107005, |
| "grad_norm": 31.712081909179688, |
| "learning_rate": 1.9999976110847667e-07, |
| "loss": 1.6243, |
| "step": 2510 |
| }, |
| { |
| "epoch": 0.0023212349706944087, |
| "grad_norm": 24.07305335998535, |
| "learning_rate": 1.9999975793387456e-07, |
| "loss": 1.533, |
| "step": 2520 |
| }, |
| { |
| "epoch": 0.0023304462205781164, |
| "grad_norm": 23.818401336669922, |
| "learning_rate": 1.9999975473831801e-07, |
| "loss": 1.5037, |
| "step": 2530 |
| }, |
| { |
| "epoch": 0.0023396574704618246, |
| "grad_norm": 21.57187843322754, |
| "learning_rate": 1.999997515218071e-07, |
| "loss": 1.4924, |
| "step": 2540 |
| }, |
| { |
| "epoch": 0.0023488687203455324, |
| "grad_norm": 29.550159454345703, |
| "learning_rate": 1.9999974828434168e-07, |
| "loss": 1.5163, |
| "step": 2550 |
| }, |
| { |
| "epoch": 0.00235807997022924, |
| "grad_norm": 22.988330841064453, |
| "learning_rate": 1.9999974502592182e-07, |
| "loss": 1.5105, |
| "step": 2560 |
| }, |
| { |
| "epoch": 0.0023672912201129484, |
| "grad_norm": 24.00938606262207, |
| "learning_rate": 1.9999974174654758e-07, |
| "loss": 1.5162, |
| "step": 2570 |
| }, |
| { |
| "epoch": 0.002376502469996656, |
| "grad_norm": 25.99742317199707, |
| "learning_rate": 1.9999973844621886e-07, |
| "loss": 1.516, |
| "step": 2580 |
| }, |
| { |
| "epoch": 0.0023857137198803643, |
| "grad_norm": 25.696409225463867, |
| "learning_rate": 1.9999973512493575e-07, |
| "loss": 1.5038, |
| "step": 2590 |
| }, |
| { |
| "epoch": 0.002394924969764072, |
| "grad_norm": 20.570842742919922, |
| "learning_rate": 1.999997317826982e-07, |
| "loss": 1.5449, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.0024041362196477803, |
| "grad_norm": 20.99848175048828, |
| "learning_rate": 1.999997284195062e-07, |
| "loss": 1.4674, |
| "step": 2610 |
| }, |
| { |
| "epoch": 0.002413347469531488, |
| "grad_norm": 31.897750854492188, |
| "learning_rate": 1.9999972503535979e-07, |
| "loss": 1.4313, |
| "step": 2620 |
| }, |
| { |
| "epoch": 0.0024225587194151963, |
| "grad_norm": 20.204248428344727, |
| "learning_rate": 1.9999972163025894e-07, |
| "loss": 1.4793, |
| "step": 2630 |
| }, |
| { |
| "epoch": 0.002431769969298904, |
| "grad_norm": 19.503555297851562, |
| "learning_rate": 1.9999971820420366e-07, |
| "loss": 1.4242, |
| "step": 2640 |
| }, |
| { |
| "epoch": 0.0024409812191826122, |
| "grad_norm": 25.30314826965332, |
| "learning_rate": 1.9999971475719395e-07, |
| "loss": 1.454, |
| "step": 2650 |
| }, |
| { |
| "epoch": 0.00245019246906632, |
| "grad_norm": 19.37495231628418, |
| "learning_rate": 1.9999971128922983e-07, |
| "loss": 1.4861, |
| "step": 2660 |
| }, |
| { |
| "epoch": 0.002459403718950028, |
| "grad_norm": 22.25645637512207, |
| "learning_rate": 1.9999970780031128e-07, |
| "loss": 1.4851, |
| "step": 2670 |
| }, |
| { |
| "epoch": 0.002468614968833736, |
| "grad_norm": 18.460275650024414, |
| "learning_rate": 1.999997042904383e-07, |
| "loss": 1.3875, |
| "step": 2680 |
| }, |
| { |
| "epoch": 0.002477826218717444, |
| "grad_norm": 18.969640731811523, |
| "learning_rate": 1.999997007596109e-07, |
| "loss": 1.4181, |
| "step": 2690 |
| }, |
| { |
| "epoch": 0.002487037468601152, |
| "grad_norm": 22.112180709838867, |
| "learning_rate": 1.9999969720782907e-07, |
| "loss": 1.4746, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.00249624871848486, |
| "grad_norm": 18.481765747070312, |
| "learning_rate": 1.9999969363509283e-07, |
| "loss": 1.4108, |
| "step": 2710 |
| }, |
| { |
| "epoch": 0.002505459968368568, |
| "grad_norm": 16.51472282409668, |
| "learning_rate": 1.9999969004140215e-07, |
| "loss": 1.3617, |
| "step": 2720 |
| }, |
| { |
| "epoch": 0.002514671218252276, |
| "grad_norm": 20.868471145629883, |
| "learning_rate": 1.9999968642675705e-07, |
| "loss": 1.4746, |
| "step": 2730 |
| }, |
| { |
| "epoch": 0.002523882468135984, |
| "grad_norm": 16.16942024230957, |
| "learning_rate": 1.9999968279115755e-07, |
| "loss": 1.4588, |
| "step": 2740 |
| }, |
| { |
| "epoch": 0.0025330937180196916, |
| "grad_norm": 16.831565856933594, |
| "learning_rate": 1.9999967913460364e-07, |
| "loss": 1.3538, |
| "step": 2750 |
| }, |
| { |
| "epoch": 0.0025423049679034, |
| "grad_norm": 26.866472244262695, |
| "learning_rate": 1.9999967545709527e-07, |
| "loss": 1.4479, |
| "step": 2760 |
| }, |
| { |
| "epoch": 0.0025515162177871076, |
| "grad_norm": 21.425960540771484, |
| "learning_rate": 1.999996717586325e-07, |
| "loss": 1.5059, |
| "step": 2770 |
| }, |
| { |
| "epoch": 0.0025607274676708158, |
| "grad_norm": 23.79142189025879, |
| "learning_rate": 1.999996680392153e-07, |
| "loss": 1.3903, |
| "step": 2780 |
| }, |
| { |
| "epoch": 0.0025699387175545235, |
| "grad_norm": 15.220338821411133, |
| "learning_rate": 1.9999966429884372e-07, |
| "loss": 1.3858, |
| "step": 2790 |
| }, |
| { |
| "epoch": 0.0025791499674382317, |
| "grad_norm": 16.26853370666504, |
| "learning_rate": 1.999996605375177e-07, |
| "loss": 1.3583, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.0025883612173219395, |
| "grad_norm": 14.986100196838379, |
| "learning_rate": 1.9999965675523727e-07, |
| "loss": 1.4296, |
| "step": 2810 |
| }, |
| { |
| "epoch": 0.0025975724672056477, |
| "grad_norm": 14.806829452514648, |
| "learning_rate": 1.999996529520024e-07, |
| "loss": 1.3297, |
| "step": 2820 |
| }, |
| { |
| "epoch": 0.0026067837170893554, |
| "grad_norm": 39.35093688964844, |
| "learning_rate": 1.9999964912781318e-07, |
| "loss": 1.2997, |
| "step": 2830 |
| }, |
| { |
| "epoch": 0.0026159949669730636, |
| "grad_norm": 15.718576431274414, |
| "learning_rate": 1.999996452826695e-07, |
| "loss": 1.3506, |
| "step": 2840 |
| }, |
| { |
| "epoch": 0.0026252062168567714, |
| "grad_norm": 15.745109558105469, |
| "learning_rate": 1.999996414165714e-07, |
| "loss": 1.3963, |
| "step": 2850 |
| }, |
| { |
| "epoch": 0.0026344174667404796, |
| "grad_norm": 19.66170883178711, |
| "learning_rate": 1.999996375295189e-07, |
| "loss": 1.3239, |
| "step": 2860 |
| }, |
| { |
| "epoch": 0.0026436287166241874, |
| "grad_norm": 16.708120346069336, |
| "learning_rate": 1.99999633621512e-07, |
| "loss": 1.3639, |
| "step": 2870 |
| }, |
| { |
| "epoch": 0.0026528399665078956, |
| "grad_norm": 51.22215270996094, |
| "learning_rate": 1.999996296925507e-07, |
| "loss": 1.3253, |
| "step": 2880 |
| }, |
| { |
| "epoch": 0.0026620512163916033, |
| "grad_norm": 14.192142486572266, |
| "learning_rate": 1.9999962574263498e-07, |
| "loss": 1.3967, |
| "step": 2890 |
| }, |
| { |
| "epoch": 0.0026712624662753115, |
| "grad_norm": 17.92701530456543, |
| "learning_rate": 1.9999962177176482e-07, |
| "loss": 1.3653, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.0026804737161590193, |
| "grad_norm": 14.899495124816895, |
| "learning_rate": 1.999996177799403e-07, |
| "loss": 1.3453, |
| "step": 2910 |
| }, |
| { |
| "epoch": 0.0026896849660427275, |
| "grad_norm": 17.192337036132812, |
| "learning_rate": 1.9999961376716136e-07, |
| "loss": 1.304, |
| "step": 2920 |
| }, |
| { |
| "epoch": 0.0026988962159264353, |
| "grad_norm": 15.120102882385254, |
| "learning_rate": 1.99999609733428e-07, |
| "loss": 1.3868, |
| "step": 2930 |
| }, |
| { |
| "epoch": 0.0027081074658101435, |
| "grad_norm": 14.252399444580078, |
| "learning_rate": 1.9999960567874025e-07, |
| "loss": 1.337, |
| "step": 2940 |
| }, |
| { |
| "epoch": 0.0027173187156938512, |
| "grad_norm": 26.42819595336914, |
| "learning_rate": 1.999996016030981e-07, |
| "loss": 1.3001, |
| "step": 2950 |
| }, |
| { |
| "epoch": 0.002726529965577559, |
| "grad_norm": 13.364202499389648, |
| "learning_rate": 1.9999959750650155e-07, |
| "loss": 1.338, |
| "step": 2960 |
| }, |
| { |
| "epoch": 0.002735741215461267, |
| "grad_norm": 16.093856811523438, |
| "learning_rate": 1.999995933889506e-07, |
| "loss": 1.3237, |
| "step": 2970 |
| }, |
| { |
| "epoch": 0.002744952465344975, |
| "grad_norm": 22.582176208496094, |
| "learning_rate": 1.9999958925044523e-07, |
| "loss": 1.3233, |
| "step": 2980 |
| }, |
| { |
| "epoch": 0.002754163715228683, |
| "grad_norm": 14.32443618774414, |
| "learning_rate": 1.9999958509098547e-07, |
| "loss": 1.2914, |
| "step": 2990 |
| }, |
| { |
| "epoch": 0.002763374965112391, |
| "grad_norm": 16.472333908081055, |
| "learning_rate": 1.999995809105713e-07, |
| "loss": 1.325, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.002772586214996099, |
| "grad_norm": 15.594547271728516, |
| "learning_rate": 1.9999957670920273e-07, |
| "loss": 1.3665, |
| "step": 3010 |
| }, |
| { |
| "epoch": 0.002781797464879807, |
| "grad_norm": 14.634944915771484, |
| "learning_rate": 1.999995724868798e-07, |
| "loss": 1.3493, |
| "step": 3020 |
| }, |
| { |
| "epoch": 0.002791008714763515, |
| "grad_norm": 18.975008010864258, |
| "learning_rate": 1.9999956824360243e-07, |
| "loss": 1.3162, |
| "step": 3030 |
| }, |
| { |
| "epoch": 0.002800219964647223, |
| "grad_norm": 16.842012405395508, |
| "learning_rate": 1.999995639793707e-07, |
| "loss": 1.3302, |
| "step": 3040 |
| }, |
| { |
| "epoch": 0.002809431214530931, |
| "grad_norm": 13.870977401733398, |
| "learning_rate": 1.9999955969418453e-07, |
| "loss": 1.3073, |
| "step": 3050 |
| }, |
| { |
| "epoch": 0.002818642464414639, |
| "grad_norm": 22.554447174072266, |
| "learning_rate": 1.99999555388044e-07, |
| "loss": 1.2565, |
| "step": 3060 |
| }, |
| { |
| "epoch": 0.002827853714298347, |
| "grad_norm": 14.861679077148438, |
| "learning_rate": 1.9999955106094908e-07, |
| "loss": 1.267, |
| "step": 3070 |
| }, |
| { |
| "epoch": 0.0028370649641820548, |
| "grad_norm": 13.033528327941895, |
| "learning_rate": 1.9999954671289976e-07, |
| "loss": 1.2697, |
| "step": 3080 |
| }, |
| { |
| "epoch": 0.002846276214065763, |
| "grad_norm": 23.113231658935547, |
| "learning_rate": 1.9999954234389604e-07, |
| "loss": 1.3274, |
| "step": 3090 |
| }, |
| { |
| "epoch": 0.0028554874639494707, |
| "grad_norm": 13.699479103088379, |
| "learning_rate": 1.9999953795393793e-07, |
| "loss": 1.3469, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.002864698713833179, |
| "grad_norm": 13.436897277832031, |
| "learning_rate": 1.9999953354302543e-07, |
| "loss": 1.2749, |
| "step": 3110 |
| }, |
| { |
| "epoch": 0.0028739099637168867, |
| "grad_norm": 13.446366310119629, |
| "learning_rate": 1.9999952911115857e-07, |
| "loss": 1.305, |
| "step": 3120 |
| }, |
| { |
| "epoch": 0.002883121213600595, |
| "grad_norm": 15.052694320678711, |
| "learning_rate": 1.9999952465833728e-07, |
| "loss": 1.3525, |
| "step": 3130 |
| }, |
| { |
| "epoch": 0.0028923324634843026, |
| "grad_norm": 15.465704917907715, |
| "learning_rate": 1.9999952018456162e-07, |
| "loss": 1.3335, |
| "step": 3140 |
| }, |
| { |
| "epoch": 0.0029015437133680104, |
| "grad_norm": 15.083033561706543, |
| "learning_rate": 1.9999951568983163e-07, |
| "loss": 1.2488, |
| "step": 3150 |
| }, |
| { |
| "epoch": 0.0029107549632517186, |
| "grad_norm": 24.209396362304688, |
| "learning_rate": 1.9999951117414719e-07, |
| "loss": 1.3009, |
| "step": 3160 |
| }, |
| { |
| "epoch": 0.0029199662131354264, |
| "grad_norm": 14.542576789855957, |
| "learning_rate": 1.999995066375084e-07, |
| "loss": 1.307, |
| "step": 3170 |
| }, |
| { |
| "epoch": 0.0029291774630191346, |
| "grad_norm": 15.570143699645996, |
| "learning_rate": 1.999995020799152e-07, |
| "loss": 1.2941, |
| "step": 3180 |
| }, |
| { |
| "epoch": 0.0029383887129028423, |
| "grad_norm": 18.506637573242188, |
| "learning_rate": 1.999994975013676e-07, |
| "loss": 1.2771, |
| "step": 3190 |
| }, |
| { |
| "epoch": 0.0029475999627865505, |
| "grad_norm": 22.15712547302246, |
| "learning_rate": 1.9999949290186565e-07, |
| "loss": 1.2634, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.0029568112126702583, |
| "grad_norm": 13.421778678894043, |
| "learning_rate": 1.9999948828140935e-07, |
| "loss": 1.2635, |
| "step": 3210 |
| }, |
| { |
| "epoch": 0.0029660224625539665, |
| "grad_norm": 13.892674446105957, |
| "learning_rate": 1.999994836399986e-07, |
| "loss": 1.267, |
| "step": 3220 |
| }, |
| { |
| "epoch": 0.0029752337124376743, |
| "grad_norm": 14.89895248413086, |
| "learning_rate": 1.9999947897763355e-07, |
| "loss": 1.2988, |
| "step": 3230 |
| }, |
| { |
| "epoch": 0.0029844449623213825, |
| "grad_norm": 16.86262321472168, |
| "learning_rate": 1.9999947429431405e-07, |
| "loss": 1.2929, |
| "step": 3240 |
| }, |
| { |
| "epoch": 0.0029936562122050902, |
| "grad_norm": 14.850977897644043, |
| "learning_rate": 1.999994695900402e-07, |
| "loss": 1.2374, |
| "step": 3250 |
| }, |
| { |
| "epoch": 0.0030028674620887984, |
| "grad_norm": 15.190448760986328, |
| "learning_rate": 1.9999946486481202e-07, |
| "loss": 1.2602, |
| "step": 3260 |
| }, |
| { |
| "epoch": 0.003012078711972506, |
| "grad_norm": 12.43272876739502, |
| "learning_rate": 1.9999946011862942e-07, |
| "loss": 1.3846, |
| "step": 3270 |
| }, |
| { |
| "epoch": 0.0030212899618562144, |
| "grad_norm": 34.73705291748047, |
| "learning_rate": 1.9999945535149244e-07, |
| "loss": 1.3473, |
| "step": 3280 |
| }, |
| { |
| "epoch": 0.003030501211739922, |
| "grad_norm": 14.248456001281738, |
| "learning_rate": 1.9999945056340111e-07, |
| "loss": 1.3025, |
| "step": 3290 |
| }, |
| { |
| "epoch": 0.0030397124616236303, |
| "grad_norm": 13.245903968811035, |
| "learning_rate": 1.999994457543554e-07, |
| "loss": 1.2935, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.003048923711507338, |
| "grad_norm": 13.939699172973633, |
| "learning_rate": 1.9999944092435533e-07, |
| "loss": 1.2136, |
| "step": 3310 |
| }, |
| { |
| "epoch": 0.0030581349613910463, |
| "grad_norm": 11.970207214355469, |
| "learning_rate": 1.999994360734009e-07, |
| "loss": 1.3425, |
| "step": 3320 |
| }, |
| { |
| "epoch": 0.003067346211274754, |
| "grad_norm": 15.683432579040527, |
| "learning_rate": 1.9999943120149206e-07, |
| "loss": 1.2758, |
| "step": 3330 |
| }, |
| { |
| "epoch": 0.003076557461158462, |
| "grad_norm": 16.502229690551758, |
| "learning_rate": 1.9999942630862887e-07, |
| "loss": 1.2562, |
| "step": 3340 |
| }, |
| { |
| "epoch": 0.00308576871104217, |
| "grad_norm": 14.47521686553955, |
| "learning_rate": 1.9999942139481133e-07, |
| "loss": 1.2208, |
| "step": 3350 |
| }, |
| { |
| "epoch": 0.003094979960925878, |
| "grad_norm": 11.98067569732666, |
| "learning_rate": 1.999994164600394e-07, |
| "loss": 1.2211, |
| "step": 3360 |
| }, |
| { |
| "epoch": 0.003104191210809586, |
| "grad_norm": 13.58169937133789, |
| "learning_rate": 1.9999941150431314e-07, |
| "loss": 1.2477, |
| "step": 3370 |
| }, |
| { |
| "epoch": 0.0031134024606932938, |
| "grad_norm": 65.29041290283203, |
| "learning_rate": 1.9999940652763253e-07, |
| "loss": 1.24, |
| "step": 3380 |
| }, |
| { |
| "epoch": 0.003122613710577002, |
| "grad_norm": 18.375919342041016, |
| "learning_rate": 1.999994015299975e-07, |
| "loss": 1.2704, |
| "step": 3390 |
| }, |
| { |
| "epoch": 0.0031318249604607097, |
| "grad_norm": 12.73376178741455, |
| "learning_rate": 1.9999939651140813e-07, |
| "loss": 1.2643, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.003141036210344418, |
| "grad_norm": 13.239251136779785, |
| "learning_rate": 1.9999939147186443e-07, |
| "loss": 1.2315, |
| "step": 3410 |
| }, |
| { |
| "epoch": 0.0031502474602281257, |
| "grad_norm": 14.189347267150879, |
| "learning_rate": 1.9999938641136636e-07, |
| "loss": 1.2195, |
| "step": 3420 |
| }, |
| { |
| "epoch": 0.003159458710111834, |
| "grad_norm": 22.244070053100586, |
| "learning_rate": 1.9999938132991393e-07, |
| "loss": 1.2834, |
| "step": 3430 |
| }, |
| { |
| "epoch": 0.0031686699599955417, |
| "grad_norm": 14.164774894714355, |
| "learning_rate": 1.9999937622750712e-07, |
| "loss": 1.2747, |
| "step": 3440 |
| }, |
| { |
| "epoch": 0.00317788120987925, |
| "grad_norm": 14.640289306640625, |
| "learning_rate": 1.9999937110414597e-07, |
| "loss": 1.2481, |
| "step": 3450 |
| }, |
| { |
| "epoch": 0.0031870924597629576, |
| "grad_norm": 12.531847953796387, |
| "learning_rate": 1.9999936595983046e-07, |
| "loss": 1.3287, |
| "step": 3460 |
| }, |
| { |
| "epoch": 0.003196303709646666, |
| "grad_norm": 16.351123809814453, |
| "learning_rate": 1.999993607945606e-07, |
| "loss": 1.3075, |
| "step": 3470 |
| }, |
| { |
| "epoch": 0.0032055149595303736, |
| "grad_norm": 16.977691650390625, |
| "learning_rate": 1.999993556083364e-07, |
| "loss": 1.2343, |
| "step": 3480 |
| }, |
| { |
| "epoch": 0.0032147262094140818, |
| "grad_norm": 17.62845802307129, |
| "learning_rate": 1.9999935040115786e-07, |
| "loss": 1.2864, |
| "step": 3490 |
| }, |
| { |
| "epoch": 0.0032239374592977895, |
| "grad_norm": 13.497228622436523, |
| "learning_rate": 1.9999934517302495e-07, |
| "loss": 1.2355, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.0032331487091814977, |
| "grad_norm": 15.825624465942383, |
| "learning_rate": 1.999993399239377e-07, |
| "loss": 1.1814, |
| "step": 3510 |
| }, |
| { |
| "epoch": 0.0032423599590652055, |
| "grad_norm": 14.071992874145508, |
| "learning_rate": 1.9999933465389608e-07, |
| "loss": 1.2022, |
| "step": 3520 |
| }, |
| { |
| "epoch": 0.0032515712089489137, |
| "grad_norm": 15.83365249633789, |
| "learning_rate": 1.9999932936290014e-07, |
| "loss": 1.2169, |
| "step": 3530 |
| }, |
| { |
| "epoch": 0.0032607824588326215, |
| "grad_norm": 11.978744506835938, |
| "learning_rate": 1.9999932405094983e-07, |
| "loss": 1.2451, |
| "step": 3540 |
| }, |
| { |
| "epoch": 0.0032699937087163292, |
| "grad_norm": 17.41489028930664, |
| "learning_rate": 1.999993187180452e-07, |
| "loss": 1.1934, |
| "step": 3550 |
| }, |
| { |
| "epoch": 0.0032792049586000374, |
| "grad_norm": 12.351943016052246, |
| "learning_rate": 1.999993133641862e-07, |
| "loss": 1.2115, |
| "step": 3560 |
| }, |
| { |
| "epoch": 0.003288416208483745, |
| "grad_norm": 14.460607528686523, |
| "learning_rate": 1.9999930798937288e-07, |
| "loss": 1.2248, |
| "step": 3570 |
| }, |
| { |
| "epoch": 0.0032976274583674534, |
| "grad_norm": 13.68131160736084, |
| "learning_rate": 1.9999930259360522e-07, |
| "loss": 1.2737, |
| "step": 3580 |
| }, |
| { |
| "epoch": 0.003306838708251161, |
| "grad_norm": 14.222561836242676, |
| "learning_rate": 1.999992971768832e-07, |
| "loss": 1.2344, |
| "step": 3590 |
| }, |
| { |
| "epoch": 0.0033160499581348694, |
| "grad_norm": 12.381871223449707, |
| "learning_rate": 1.9999929173920686e-07, |
| "loss": 1.2382, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.003325261208018577, |
| "grad_norm": 15.108046531677246, |
| "learning_rate": 1.9999928628057616e-07, |
| "loss": 1.2783, |
| "step": 3610 |
| }, |
| { |
| "epoch": 0.0033344724579022853, |
| "grad_norm": 14.082871437072754, |
| "learning_rate": 1.9999928080099115e-07, |
| "loss": 1.2493, |
| "step": 3620 |
| }, |
| { |
| "epoch": 0.003343683707785993, |
| "grad_norm": 12.710875511169434, |
| "learning_rate": 1.999992753004518e-07, |
| "loss": 1.1653, |
| "step": 3630 |
| }, |
| { |
| "epoch": 0.0033528949576697013, |
| "grad_norm": 13.085129737854004, |
| "learning_rate": 1.9999926977895809e-07, |
| "loss": 1.1699, |
| "step": 3640 |
| }, |
| { |
| "epoch": 0.003362106207553409, |
| "grad_norm": 12.283415794372559, |
| "learning_rate": 1.9999926423651007e-07, |
| "loss": 1.2873, |
| "step": 3650 |
| }, |
| { |
| "epoch": 0.0033713174574371172, |
| "grad_norm": 13.496448516845703, |
| "learning_rate": 1.9999925867310773e-07, |
| "loss": 1.201, |
| "step": 3660 |
| }, |
| { |
| "epoch": 0.003380528707320825, |
| "grad_norm": 26.06830406188965, |
| "learning_rate": 1.9999925308875103e-07, |
| "loss": 1.263, |
| "step": 3670 |
| }, |
| { |
| "epoch": 0.003389739957204533, |
| "grad_norm": 15.78938102722168, |
| "learning_rate": 1.9999924748344002e-07, |
| "loss": 1.2242, |
| "step": 3680 |
| }, |
| { |
| "epoch": 0.003398951207088241, |
| "grad_norm": 13.02888298034668, |
| "learning_rate": 1.9999924185717468e-07, |
| "loss": 1.1962, |
| "step": 3690 |
| }, |
| { |
| "epoch": 0.003408162456971949, |
| "grad_norm": 11.474292755126953, |
| "learning_rate": 1.99999236209955e-07, |
| "loss": 1.216, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.003417373706855657, |
| "grad_norm": 11.879030227661133, |
| "learning_rate": 1.9999923054178102e-07, |
| "loss": 1.2036, |
| "step": 3710 |
| }, |
| { |
| "epoch": 0.003426584956739365, |
| "grad_norm": 13.251056671142578, |
| "learning_rate": 1.9999922485265268e-07, |
| "loss": 1.2256, |
| "step": 3720 |
| }, |
| { |
| "epoch": 0.003435796206623073, |
| "grad_norm": 13.749625205993652, |
| "learning_rate": 1.9999921914257003e-07, |
| "loss": 1.2299, |
| "step": 3730 |
| }, |
| { |
| "epoch": 0.0034450074565067807, |
| "grad_norm": 16.408334732055664, |
| "learning_rate": 1.9999921341153304e-07, |
| "loss": 1.2391, |
| "step": 3740 |
| }, |
| { |
| "epoch": 0.003454218706390489, |
| "grad_norm": 17.28126335144043, |
| "learning_rate": 1.9999920765954178e-07, |
| "loss": 1.2027, |
| "step": 3750 |
| }, |
| { |
| "epoch": 0.0034634299562741966, |
| "grad_norm": 13.930543899536133, |
| "learning_rate": 1.999992018865962e-07, |
| "loss": 1.2714, |
| "step": 3760 |
| }, |
| { |
| "epoch": 0.003472641206157905, |
| "grad_norm": 15.985444068908691, |
| "learning_rate": 1.9999919609269625e-07, |
| "loss": 1.2693, |
| "step": 3770 |
| }, |
| { |
| "epoch": 0.0034818524560416126, |
| "grad_norm": 12.719490051269531, |
| "learning_rate": 1.99999190277842e-07, |
| "loss": 1.1994, |
| "step": 3780 |
| }, |
| { |
| "epoch": 0.0034910637059253208, |
| "grad_norm": 12.369451522827148, |
| "learning_rate": 1.9999918444203343e-07, |
| "loss": 1.1892, |
| "step": 3790 |
| }, |
| { |
| "epoch": 0.0035002749558090285, |
| "grad_norm": 22.71251678466797, |
| "learning_rate": 1.9999917858527054e-07, |
| "loss": 1.1771, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.0035094862056927367, |
| "grad_norm": 13.909941673278809, |
| "learning_rate": 1.9999917270755336e-07, |
| "loss": 1.296, |
| "step": 3810 |
| }, |
| { |
| "epoch": 0.0035186974555764445, |
| "grad_norm": 22.640085220336914, |
| "learning_rate": 1.9999916680888185e-07, |
| "loss": 1.1765, |
| "step": 3820 |
| }, |
| { |
| "epoch": 0.0035279087054601527, |
| "grad_norm": 13.252551078796387, |
| "learning_rate": 1.9999916088925605e-07, |
| "loss": 1.2353, |
| "step": 3830 |
| }, |
| { |
| "epoch": 0.0035371199553438605, |
| "grad_norm": 13.526108741760254, |
| "learning_rate": 1.9999915494867591e-07, |
| "loss": 1.1762, |
| "step": 3840 |
| }, |
| { |
| "epoch": 0.0035463312052275687, |
| "grad_norm": 13.243807792663574, |
| "learning_rate": 1.9999914898714148e-07, |
| "loss": 1.2295, |
| "step": 3850 |
| }, |
| { |
| "epoch": 0.0035555424551112764, |
| "grad_norm": 13.98582649230957, |
| "learning_rate": 1.9999914300465273e-07, |
| "loss": 1.2322, |
| "step": 3860 |
| }, |
| { |
| "epoch": 0.0035647537049949846, |
| "grad_norm": 12.01987361907959, |
| "learning_rate": 1.999991370012097e-07, |
| "loss": 1.2352, |
| "step": 3870 |
| }, |
| { |
| "epoch": 0.0035739649548786924, |
| "grad_norm": 13.643336296081543, |
| "learning_rate": 1.9999913097681233e-07, |
| "loss": 1.1808, |
| "step": 3880 |
| }, |
| { |
| "epoch": 0.0035831762047624006, |
| "grad_norm": 12.53453254699707, |
| "learning_rate": 1.9999912493146068e-07, |
| "loss": 1.2222, |
| "step": 3890 |
| }, |
| { |
| "epoch": 0.0035923874546461084, |
| "grad_norm": 12.463088035583496, |
| "learning_rate": 1.9999911886515474e-07, |
| "loss": 1.206, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.0036015987045298166, |
| "grad_norm": 12.918343544006348, |
| "learning_rate": 1.9999911277789446e-07, |
| "loss": 1.1843, |
| "step": 3910 |
| }, |
| { |
| "epoch": 0.0036108099544135243, |
| "grad_norm": 14.662476539611816, |
| "learning_rate": 1.9999910666967987e-07, |
| "loss": 1.221, |
| "step": 3920 |
| }, |
| { |
| "epoch": 0.003620021204297232, |
| "grad_norm": 12.663122177124023, |
| "learning_rate": 1.99999100540511e-07, |
| "loss": 1.2562, |
| "step": 3930 |
| }, |
| { |
| "epoch": 0.0036292324541809403, |
| "grad_norm": 12.291542053222656, |
| "learning_rate": 1.9999909439038786e-07, |
| "loss": 1.2119, |
| "step": 3940 |
| }, |
| { |
| "epoch": 0.003638443704064648, |
| "grad_norm": 12.939071655273438, |
| "learning_rate": 1.9999908821931043e-07, |
| "loss": 1.2768, |
| "step": 3950 |
| }, |
| { |
| "epoch": 0.0036476549539483562, |
| "grad_norm": 21.85366439819336, |
| "learning_rate": 1.9999908202727867e-07, |
| "loss": 1.2957, |
| "step": 3960 |
| }, |
| { |
| "epoch": 0.003656866203832064, |
| "grad_norm": 13.644819259643555, |
| "learning_rate": 1.999990758142926e-07, |
| "loss": 1.237, |
| "step": 3970 |
| }, |
| { |
| "epoch": 0.003666077453715772, |
| "grad_norm": 13.78133487701416, |
| "learning_rate": 1.9999906958035228e-07, |
| "loss": 1.2535, |
| "step": 3980 |
| }, |
| { |
| "epoch": 0.00367528870359948, |
| "grad_norm": 12.343091011047363, |
| "learning_rate": 1.9999906332545767e-07, |
| "loss": 1.2438, |
| "step": 3990 |
| }, |
| { |
| "epoch": 0.003684499953483188, |
| "grad_norm": 15.756454467773438, |
| "learning_rate": 1.9999905704960875e-07, |
| "loss": 1.2275, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.003693711203366896, |
| "grad_norm": 11.772343635559082, |
| "learning_rate": 1.9999905075280555e-07, |
| "loss": 1.1476, |
| "step": 4010 |
| }, |
| { |
| "epoch": 0.003702922453250604, |
| "grad_norm": 12.028583526611328, |
| "learning_rate": 1.9999904443504802e-07, |
| "loss": 1.1931, |
| "step": 4020 |
| }, |
| { |
| "epoch": 0.003712133703134312, |
| "grad_norm": 14.000866889953613, |
| "learning_rate": 1.9999903809633626e-07, |
| "loss": 1.229, |
| "step": 4030 |
| }, |
| { |
| "epoch": 0.00372134495301802, |
| "grad_norm": 13.003263473510742, |
| "learning_rate": 1.999990317366702e-07, |
| "loss": 1.2522, |
| "step": 4040 |
| }, |
| { |
| "epoch": 0.003730556202901728, |
| "grad_norm": 23.33169937133789, |
| "learning_rate": 1.9999902535604985e-07, |
| "loss": 1.2131, |
| "step": 4050 |
| }, |
| { |
| "epoch": 0.003739767452785436, |
| "grad_norm": 13.117879867553711, |
| "learning_rate": 1.9999901895447521e-07, |
| "loss": 1.2133, |
| "step": 4060 |
| }, |
| { |
| "epoch": 0.003748978702669144, |
| "grad_norm": 16.691890716552734, |
| "learning_rate": 1.999990125319463e-07, |
| "loss": 1.1995, |
| "step": 4070 |
| }, |
| { |
| "epoch": 0.003758189952552852, |
| "grad_norm": 11.178106307983398, |
| "learning_rate": 1.9999900608846313e-07, |
| "loss": 1.234, |
| "step": 4080 |
| }, |
| { |
| "epoch": 0.00376740120243656, |
| "grad_norm": 12.789695739746094, |
| "learning_rate": 1.9999899962402568e-07, |
| "loss": 1.2273, |
| "step": 4090 |
| }, |
| { |
| "epoch": 0.003776612452320268, |
| "grad_norm": 14.797239303588867, |
| "learning_rate": 1.999989931386339e-07, |
| "loss": 1.2125, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.0037858237022039757, |
| "grad_norm": 15.13731575012207, |
| "learning_rate": 1.9999898663228788e-07, |
| "loss": 1.2217, |
| "step": 4110 |
| }, |
| { |
| "epoch": 0.003795034952087684, |
| "grad_norm": 13.130472183227539, |
| "learning_rate": 1.9999898010498759e-07, |
| "loss": 1.3013, |
| "step": 4120 |
| }, |
| { |
| "epoch": 0.0038042462019713917, |
| "grad_norm": 14.67823600769043, |
| "learning_rate": 1.9999897355673302e-07, |
| "loss": 1.2177, |
| "step": 4130 |
| }, |
| { |
| "epoch": 0.0038134574518550995, |
| "grad_norm": 12.409622192382812, |
| "learning_rate": 1.999989669875242e-07, |
| "loss": 1.1578, |
| "step": 4140 |
| }, |
| { |
| "epoch": 0.0038226687017388077, |
| "grad_norm": 16.759443283081055, |
| "learning_rate": 1.9999896039736108e-07, |
| "loss": 1.1875, |
| "step": 4150 |
| }, |
| { |
| "epoch": 0.0038318799516225154, |
| "grad_norm": 15.062689781188965, |
| "learning_rate": 1.9999895378624373e-07, |
| "loss": 1.2455, |
| "step": 4160 |
| }, |
| { |
| "epoch": 0.0038410912015062236, |
| "grad_norm": 23.438220977783203, |
| "learning_rate": 1.999989471541721e-07, |
| "loss": 1.1894, |
| "step": 4170 |
| }, |
| { |
| "epoch": 0.0038503024513899314, |
| "grad_norm": 12.017776489257812, |
| "learning_rate": 1.9999894050114618e-07, |
| "loss": 1.1714, |
| "step": 4180 |
| }, |
| { |
| "epoch": 0.0038595137012736396, |
| "grad_norm": 13.029670715332031, |
| "learning_rate": 1.99998933827166e-07, |
| "loss": 1.1813, |
| "step": 4190 |
| }, |
| { |
| "epoch": 0.0038687249511573474, |
| "grad_norm": 17.56375503540039, |
| "learning_rate": 1.999989271322316e-07, |
| "loss": 1.2203, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.0038779362010410556, |
| "grad_norm": 11.085100173950195, |
| "learning_rate": 1.999989204163429e-07, |
| "loss": 1.2137, |
| "step": 4210 |
| }, |
| { |
| "epoch": 0.0038871474509247633, |
| "grad_norm": 16.616640090942383, |
| "learning_rate": 1.9999891367949996e-07, |
| "loss": 1.162, |
| "step": 4220 |
| }, |
| { |
| "epoch": 0.0038963587008084715, |
| "grad_norm": 13.10045337677002, |
| "learning_rate": 1.9999890692170273e-07, |
| "loss": 1.222, |
| "step": 4230 |
| }, |
| { |
| "epoch": 0.0039055699506921793, |
| "grad_norm": 14.615435600280762, |
| "learning_rate": 1.9999890014295126e-07, |
| "loss": 1.1992, |
| "step": 4240 |
| }, |
| { |
| "epoch": 0.0039147812005758875, |
| "grad_norm": 13.430987358093262, |
| "learning_rate": 1.9999889334324554e-07, |
| "loss": 1.19, |
| "step": 4250 |
| }, |
| { |
| "epoch": 0.003923992450459595, |
| "grad_norm": 15.447749137878418, |
| "learning_rate": 1.9999888652258555e-07, |
| "loss": 1.2524, |
| "step": 4260 |
| }, |
| { |
| "epoch": 0.003933203700343303, |
| "grad_norm": 11.363496780395508, |
| "learning_rate": 1.9999887968097134e-07, |
| "loss": 1.1288, |
| "step": 4270 |
| }, |
| { |
| "epoch": 0.003942414950227012, |
| "grad_norm": 13.688098907470703, |
| "learning_rate": 1.9999887281840284e-07, |
| "loss": 1.1599, |
| "step": 4280 |
| }, |
| { |
| "epoch": 0.003951626200110719, |
| "grad_norm": 14.446454048156738, |
| "learning_rate": 1.9999886593488011e-07, |
| "loss": 1.1777, |
| "step": 4290 |
| }, |
| { |
| "epoch": 0.003960837449994427, |
| "grad_norm": 11.413469314575195, |
| "learning_rate": 1.9999885903040312e-07, |
| "loss": 1.2309, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.003970048699878135, |
| "grad_norm": 17.09931755065918, |
| "learning_rate": 1.999988521049719e-07, |
| "loss": 1.1879, |
| "step": 4310 |
| }, |
| { |
| "epoch": 0.003979259949761843, |
| "grad_norm": 11.551513671875, |
| "learning_rate": 1.9999884515858642e-07, |
| "loss": 1.1972, |
| "step": 4320 |
| }, |
| { |
| "epoch": 0.003988471199645551, |
| "grad_norm": 13.268471717834473, |
| "learning_rate": 1.999988381912467e-07, |
| "loss": 1.1657, |
| "step": 4330 |
| }, |
| { |
| "epoch": 0.003997682449529259, |
| "grad_norm": 11.781455039978027, |
| "learning_rate": 1.9999883120295276e-07, |
| "loss": 1.1493, |
| "step": 4340 |
| }, |
| { |
| "epoch": 0.004006893699412967, |
| "grad_norm": 13.025263786315918, |
| "learning_rate": 1.9999882419370456e-07, |
| "loss": 1.1813, |
| "step": 4350 |
| }, |
| { |
| "epoch": 0.004016104949296675, |
| "grad_norm": 15.52880859375, |
| "learning_rate": 1.9999881716350212e-07, |
| "loss": 1.2835, |
| "step": 4360 |
| }, |
| { |
| "epoch": 0.004025316199180383, |
| "grad_norm": 11.303088188171387, |
| "learning_rate": 1.9999881011234544e-07, |
| "loss": 1.1766, |
| "step": 4370 |
| }, |
| { |
| "epoch": 0.004034527449064091, |
| "grad_norm": 12.284314155578613, |
| "learning_rate": 1.999988030402345e-07, |
| "loss": 1.2264, |
| "step": 4380 |
| }, |
| { |
| "epoch": 0.004043738698947799, |
| "grad_norm": 14.824917793273926, |
| "learning_rate": 1.9999879594716935e-07, |
| "loss": 1.1067, |
| "step": 4390 |
| }, |
| { |
| "epoch": 0.0040529499488315066, |
| "grad_norm": 13.018584251403809, |
| "learning_rate": 1.9999878883314999e-07, |
| "loss": 1.199, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.004062161198715215, |
| "grad_norm": 14.732589721679688, |
| "learning_rate": 1.9999878169817637e-07, |
| "loss": 1.222, |
| "step": 4410 |
| }, |
| { |
| "epoch": 0.004071372448598923, |
| "grad_norm": 12.38150405883789, |
| "learning_rate": 1.9999877454224851e-07, |
| "loss": 1.1282, |
| "step": 4420 |
| }, |
| { |
| "epoch": 0.004080583698482631, |
| "grad_norm": 13.039387702941895, |
| "learning_rate": 1.9999876736536644e-07, |
| "loss": 1.1889, |
| "step": 4430 |
| }, |
| { |
| "epoch": 0.0040897949483663385, |
| "grad_norm": 14.31584358215332, |
| "learning_rate": 1.9999876016753014e-07, |
| "loss": 1.2573, |
| "step": 4440 |
| }, |
| { |
| "epoch": 0.004099006198250047, |
| "grad_norm": 23.066286087036133, |
| "learning_rate": 1.999987529487396e-07, |
| "loss": 1.1515, |
| "step": 4450 |
| }, |
| { |
| "epoch": 0.004108217448133755, |
| "grad_norm": 10.693852424621582, |
| "learning_rate": 1.9999874570899484e-07, |
| "loss": 1.1947, |
| "step": 4460 |
| }, |
| { |
| "epoch": 0.004117428698017463, |
| "grad_norm": 29.878801345825195, |
| "learning_rate": 1.9999873844829586e-07, |
| "loss": 1.2026, |
| "step": 4470 |
| }, |
| { |
| "epoch": 0.00412663994790117, |
| "grad_norm": 12.488697052001953, |
| "learning_rate": 1.999987311666427e-07, |
| "loss": 1.1943, |
| "step": 4480 |
| }, |
| { |
| "epoch": 0.004135851197784879, |
| "grad_norm": 13.091660499572754, |
| "learning_rate": 1.9999872386403525e-07, |
| "loss": 1.1772, |
| "step": 4490 |
| }, |
| { |
| "epoch": 0.004145062447668587, |
| "grad_norm": 13.047904014587402, |
| "learning_rate": 1.999987165404736e-07, |
| "loss": 1.2562, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.0041542736975522946, |
| "grad_norm": 12.073881149291992, |
| "learning_rate": 1.9999870919595776e-07, |
| "loss": 1.2068, |
| "step": 4510 |
| }, |
| { |
| "epoch": 0.004163484947436002, |
| "grad_norm": 12.780438423156738, |
| "learning_rate": 1.9999870183048768e-07, |
| "loss": 1.2062, |
| "step": 4520 |
| }, |
| { |
| "epoch": 0.00417269619731971, |
| "grad_norm": 14.03786849975586, |
| "learning_rate": 1.999986944440634e-07, |
| "loss": 1.1582, |
| "step": 4530 |
| }, |
| { |
| "epoch": 0.004181907447203419, |
| "grad_norm": 12.298108100891113, |
| "learning_rate": 1.999986870366849e-07, |
| "loss": 1.1663, |
| "step": 4540 |
| }, |
| { |
| "epoch": 0.0041911186970871265, |
| "grad_norm": 13.728750228881836, |
| "learning_rate": 1.9999867960835218e-07, |
| "loss": 1.179, |
| "step": 4550 |
| }, |
| { |
| "epoch": 0.004200329946970834, |
| "grad_norm": 13.970946311950684, |
| "learning_rate": 1.9999867215906526e-07, |
| "loss": 1.1296, |
| "step": 4560 |
| }, |
| { |
| "epoch": 0.004209541196854542, |
| "grad_norm": 14.478928565979004, |
| "learning_rate": 1.9999866468882415e-07, |
| "loss": 1.118, |
| "step": 4570 |
| }, |
| { |
| "epoch": 0.004218752446738251, |
| "grad_norm": 11.310677528381348, |
| "learning_rate": 1.9999865719762882e-07, |
| "loss": 1.1628, |
| "step": 4580 |
| }, |
| { |
| "epoch": 0.004227963696621958, |
| "grad_norm": 12.71045207977295, |
| "learning_rate": 1.9999864968547927e-07, |
| "loss": 1.1373, |
| "step": 4590 |
| }, |
| { |
| "epoch": 0.004237174946505666, |
| "grad_norm": 10.45562744140625, |
| "learning_rate": 1.9999864215237552e-07, |
| "loss": 1.1827, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.004246386196389374, |
| "grad_norm": 11.848020553588867, |
| "learning_rate": 1.999986345983176e-07, |
| "loss": 1.1976, |
| "step": 4610 |
| }, |
| { |
| "epoch": 0.004255597446273083, |
| "grad_norm": 12.444038391113281, |
| "learning_rate": 1.9999862702330544e-07, |
| "loss": 1.1178, |
| "step": 4620 |
| }, |
| { |
| "epoch": 0.00426480869615679, |
| "grad_norm": 11.427351951599121, |
| "learning_rate": 1.999986194273391e-07, |
| "loss": 1.1659, |
| "step": 4630 |
| }, |
| { |
| "epoch": 0.004274019946040498, |
| "grad_norm": 13.343820571899414, |
| "learning_rate": 1.9999861181041858e-07, |
| "loss": 1.1711, |
| "step": 4640 |
| }, |
| { |
| "epoch": 0.004283231195924206, |
| "grad_norm": 11.601849555969238, |
| "learning_rate": 1.9999860417254385e-07, |
| "loss": 1.2032, |
| "step": 4650 |
| }, |
| { |
| "epoch": 0.0042924424458079145, |
| "grad_norm": 11.711636543273926, |
| "learning_rate": 1.999985965137149e-07, |
| "loss": 1.2246, |
| "step": 4660 |
| }, |
| { |
| "epoch": 0.004301653695691622, |
| "grad_norm": 12.642359733581543, |
| "learning_rate": 1.999985888339318e-07, |
| "loss": 1.1825, |
| "step": 4670 |
| }, |
| { |
| "epoch": 0.00431086494557533, |
| "grad_norm": 11.899884223937988, |
| "learning_rate": 1.9999858113319448e-07, |
| "loss": 1.1839, |
| "step": 4680 |
| }, |
| { |
| "epoch": 0.004320076195459038, |
| "grad_norm": 13.397693634033203, |
| "learning_rate": 1.9999857341150298e-07, |
| "loss": 1.1555, |
| "step": 4690 |
| }, |
| { |
| "epoch": 0.004329287445342746, |
| "grad_norm": 11.658913612365723, |
| "learning_rate": 1.999985656688573e-07, |
| "loss": 1.114, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.004338498695226454, |
| "grad_norm": 10.457526206970215, |
| "learning_rate": 1.9999855790525743e-07, |
| "loss": 1.207, |
| "step": 4710 |
| }, |
| { |
| "epoch": 0.004347709945110162, |
| "grad_norm": 12.438407897949219, |
| "learning_rate": 1.9999855012070336e-07, |
| "loss": 1.1374, |
| "step": 4720 |
| }, |
| { |
| "epoch": 0.00435692119499387, |
| "grad_norm": 11.482919692993164, |
| "learning_rate": 1.9999854231519512e-07, |
| "loss": 1.1584, |
| "step": 4730 |
| }, |
| { |
| "epoch": 0.0043661324448775775, |
| "grad_norm": 13.851531982421875, |
| "learning_rate": 1.999985344887327e-07, |
| "loss": 1.1566, |
| "step": 4740 |
| }, |
| { |
| "epoch": 0.004375343694761286, |
| "grad_norm": 11.870559692382812, |
| "learning_rate": 1.9999852664131614e-07, |
| "loss": 1.1489, |
| "step": 4750 |
| }, |
| { |
| "epoch": 0.004384554944644994, |
| "grad_norm": 12.809205055236816, |
| "learning_rate": 1.9999851877294535e-07, |
| "loss": 1.1326, |
| "step": 4760 |
| }, |
| { |
| "epoch": 0.004393766194528702, |
| "grad_norm": 13.546515464782715, |
| "learning_rate": 1.9999851088362042e-07, |
| "loss": 1.1899, |
| "step": 4770 |
| }, |
| { |
| "epoch": 0.004402977444412409, |
| "grad_norm": 11.289336204528809, |
| "learning_rate": 1.9999850297334127e-07, |
| "loss": 1.1982, |
| "step": 4780 |
| }, |
| { |
| "epoch": 0.004412188694296118, |
| "grad_norm": 11.107280731201172, |
| "learning_rate": 1.99998495042108e-07, |
| "loss": 1.1374, |
| "step": 4790 |
| }, |
| { |
| "epoch": 0.004421399944179826, |
| "grad_norm": 12.278093338012695, |
| "learning_rate": 1.999984870899205e-07, |
| "loss": 1.1996, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.004430611194063534, |
| "grad_norm": 12.069204330444336, |
| "learning_rate": 1.999984791167789e-07, |
| "loss": 1.1325, |
| "step": 4810 |
| }, |
| { |
| "epoch": 0.004439822443947241, |
| "grad_norm": 13.429888725280762, |
| "learning_rate": 1.999984711226831e-07, |
| "loss": 1.1835, |
| "step": 4820 |
| }, |
| { |
| "epoch": 0.00444903369383095, |
| "grad_norm": 13.618329048156738, |
| "learning_rate": 1.999984631076331e-07, |
| "loss": 1.1408, |
| "step": 4830 |
| }, |
| { |
| "epoch": 0.004458244943714658, |
| "grad_norm": 13.587790489196777, |
| "learning_rate": 1.9999845507162897e-07, |
| "loss": 1.1188, |
| "step": 4840 |
| }, |
| { |
| "epoch": 0.0044674561935983655, |
| "grad_norm": 11.405389785766602, |
| "learning_rate": 1.9999844701467069e-07, |
| "loss": 1.1872, |
| "step": 4850 |
| }, |
| { |
| "epoch": 0.004476667443482073, |
| "grad_norm": 11.70238971710205, |
| "learning_rate": 1.9999843893675824e-07, |
| "loss": 1.1724, |
| "step": 4860 |
| }, |
| { |
| "epoch": 0.004485878693365782, |
| "grad_norm": 63.03736114501953, |
| "learning_rate": 1.999984308378916e-07, |
| "loss": 1.1506, |
| "step": 4870 |
| }, |
| { |
| "epoch": 0.00449508994324949, |
| "grad_norm": 19.9056453704834, |
| "learning_rate": 1.9999842271807084e-07, |
| "loss": 1.2304, |
| "step": 4880 |
| }, |
| { |
| "epoch": 0.004504301193133197, |
| "grad_norm": 13.55337905883789, |
| "learning_rate": 1.9999841457729595e-07, |
| "loss": 1.1292, |
| "step": 4890 |
| }, |
| { |
| "epoch": 0.004513512443016905, |
| "grad_norm": 18.283336639404297, |
| "learning_rate": 1.9999840641556684e-07, |
| "loss": 1.1723, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.004522723692900613, |
| "grad_norm": 39.903018951416016, |
| "learning_rate": 1.999983982328836e-07, |
| "loss": 1.1681, |
| "step": 4910 |
| }, |
| { |
| "epoch": 0.004531934942784322, |
| "grad_norm": 13.062829971313477, |
| "learning_rate": 1.9999839002924622e-07, |
| "loss": 1.1849, |
| "step": 4920 |
| }, |
| { |
| "epoch": 0.004541146192668029, |
| "grad_norm": 12.126574516296387, |
| "learning_rate": 1.999983818046547e-07, |
| "loss": 1.1863, |
| "step": 4930 |
| }, |
| { |
| "epoch": 0.004550357442551737, |
| "grad_norm": 11.967127799987793, |
| "learning_rate": 1.99998373559109e-07, |
| "loss": 1.1161, |
| "step": 4940 |
| }, |
| { |
| "epoch": 0.004559568692435445, |
| "grad_norm": 11.391254425048828, |
| "learning_rate": 1.9999836529260921e-07, |
| "loss": 1.199, |
| "step": 4950 |
| }, |
| { |
| "epoch": 0.0045687799423191535, |
| "grad_norm": 13.91141128540039, |
| "learning_rate": 1.9999835700515524e-07, |
| "loss": 1.181, |
| "step": 4960 |
| }, |
| { |
| "epoch": 0.004577991192202861, |
| "grad_norm": 12.89747428894043, |
| "learning_rate": 1.999983486967471e-07, |
| "loss": 1.1863, |
| "step": 4970 |
| }, |
| { |
| "epoch": 0.004587202442086569, |
| "grad_norm": 15.344111442565918, |
| "learning_rate": 1.9999834036738488e-07, |
| "loss": 1.1701, |
| "step": 4980 |
| }, |
| { |
| "epoch": 0.004596413691970277, |
| "grad_norm": 11.81086540222168, |
| "learning_rate": 1.9999833201706848e-07, |
| "loss": 1.1477, |
| "step": 4990 |
| }, |
| { |
| "epoch": 0.004605624941853985, |
| "grad_norm": 12.476078033447266, |
| "learning_rate": 1.9999832364579795e-07, |
| "loss": 1.1618, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.004614836191737693, |
| "grad_norm": 23.387897491455078, |
| "learning_rate": 1.9999831525357328e-07, |
| "loss": 1.1536, |
| "step": 5010 |
| }, |
| { |
| "epoch": 0.004624047441621401, |
| "grad_norm": 13.419051170349121, |
| "learning_rate": 1.999983068403945e-07, |
| "loss": 1.1322, |
| "step": 5020 |
| }, |
| { |
| "epoch": 0.004633258691505109, |
| "grad_norm": 12.904322624206543, |
| "learning_rate": 1.999982984062616e-07, |
| "loss": 1.1787, |
| "step": 5030 |
| }, |
| { |
| "epoch": 0.004642469941388817, |
| "grad_norm": 12.85203742980957, |
| "learning_rate": 1.9999828995117451e-07, |
| "loss": 1.1478, |
| "step": 5040 |
| }, |
| { |
| "epoch": 0.004651681191272525, |
| "grad_norm": 14.170493125915527, |
| "learning_rate": 1.9999828147513337e-07, |
| "loss": 1.1668, |
| "step": 5050 |
| }, |
| { |
| "epoch": 0.004660892441156233, |
| "grad_norm": 11.131400108337402, |
| "learning_rate": 1.9999827297813803e-07, |
| "loss": 1.1408, |
| "step": 5060 |
| }, |
| { |
| "epoch": 0.004670103691039941, |
| "grad_norm": 12.100055694580078, |
| "learning_rate": 1.9999826446018863e-07, |
| "loss": 1.1545, |
| "step": 5070 |
| }, |
| { |
| "epoch": 0.004679314940923649, |
| "grad_norm": 12.49963092803955, |
| "learning_rate": 1.9999825592128506e-07, |
| "loss": 1.1676, |
| "step": 5080 |
| }, |
| { |
| "epoch": 0.004688526190807357, |
| "grad_norm": 13.807575225830078, |
| "learning_rate": 1.9999824736142736e-07, |
| "loss": 1.0957, |
| "step": 5090 |
| }, |
| { |
| "epoch": 0.004697737440691065, |
| "grad_norm": 12.069953918457031, |
| "learning_rate": 1.9999823878061556e-07, |
| "loss": 1.1667, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.004706948690574773, |
| "grad_norm": 11.91406536102295, |
| "learning_rate": 1.9999823017884966e-07, |
| "loss": 1.1408, |
| "step": 5110 |
| }, |
| { |
| "epoch": 0.00471615994045848, |
| "grad_norm": 12.7337064743042, |
| "learning_rate": 1.9999822155612964e-07, |
| "loss": 1.1029, |
| "step": 5120 |
| }, |
| { |
| "epoch": 0.004725371190342189, |
| "grad_norm": 12.67042064666748, |
| "learning_rate": 1.999982129124555e-07, |
| "loss": 1.1393, |
| "step": 5130 |
| }, |
| { |
| "epoch": 0.004734582440225897, |
| "grad_norm": 14.979564666748047, |
| "learning_rate": 1.9999820424782724e-07, |
| "loss": 1.1588, |
| "step": 5140 |
| }, |
| { |
| "epoch": 0.0047437936901096045, |
| "grad_norm": 12.554140090942383, |
| "learning_rate": 1.9999819556224491e-07, |
| "loss": 1.1399, |
| "step": 5150 |
| }, |
| { |
| "epoch": 0.004753004939993312, |
| "grad_norm": 12.427827835083008, |
| "learning_rate": 1.9999818685570845e-07, |
| "loss": 1.1983, |
| "step": 5160 |
| }, |
| { |
| "epoch": 0.004762216189877021, |
| "grad_norm": 12.657371520996094, |
| "learning_rate": 1.9999817812821786e-07, |
| "loss": 1.1407, |
| "step": 5170 |
| }, |
| { |
| "epoch": 0.004771427439760729, |
| "grad_norm": 14.82480239868164, |
| "learning_rate": 1.999981693797732e-07, |
| "loss": 1.1509, |
| "step": 5180 |
| }, |
| { |
| "epoch": 0.004780638689644436, |
| "grad_norm": 12.023574829101562, |
| "learning_rate": 1.999981606103744e-07, |
| "loss": 1.1652, |
| "step": 5190 |
| }, |
| { |
| "epoch": 0.004789849939528144, |
| "grad_norm": 12.974525451660156, |
| "learning_rate": 1.999981518200215e-07, |
| "loss": 1.1624, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.004799061189411853, |
| "grad_norm": 13.587610244750977, |
| "learning_rate": 1.9999814300871454e-07, |
| "loss": 1.1068, |
| "step": 5210 |
| }, |
| { |
| "epoch": 0.004808272439295561, |
| "grad_norm": 11.925540924072266, |
| "learning_rate": 1.9999813417645345e-07, |
| "loss": 1.1311, |
| "step": 5220 |
| }, |
| { |
| "epoch": 0.004817483689179268, |
| "grad_norm": 18.89828109741211, |
| "learning_rate": 1.999981253232383e-07, |
| "loss": 1.1606, |
| "step": 5230 |
| }, |
| { |
| "epoch": 0.004826694939062976, |
| "grad_norm": 11.256651878356934, |
| "learning_rate": 1.9999811644906903e-07, |
| "loss": 1.1365, |
| "step": 5240 |
| }, |
| { |
| "epoch": 0.004835906188946685, |
| "grad_norm": 16.999616622924805, |
| "learning_rate": 1.9999810755394568e-07, |
| "loss": 1.1494, |
| "step": 5250 |
| }, |
| { |
| "epoch": 0.0048451174388303925, |
| "grad_norm": 14.34951114654541, |
| "learning_rate": 1.9999809863786822e-07, |
| "loss": 1.1542, |
| "step": 5260 |
| }, |
| { |
| "epoch": 0.0048543286887141, |
| "grad_norm": 12.929566383361816, |
| "learning_rate": 1.999980897008367e-07, |
| "loss": 1.1571, |
| "step": 5270 |
| }, |
| { |
| "epoch": 0.004863539938597808, |
| "grad_norm": 12.252326011657715, |
| "learning_rate": 1.9999808074285106e-07, |
| "loss": 1.115, |
| "step": 5280 |
| }, |
| { |
| "epoch": 0.004872751188481517, |
| "grad_norm": 12.771810531616211, |
| "learning_rate": 1.9999807176391136e-07, |
| "loss": 1.1686, |
| "step": 5290 |
| }, |
| { |
| "epoch": 0.0048819624383652244, |
| "grad_norm": 11.848444938659668, |
| "learning_rate": 1.9999806276401758e-07, |
| "loss": 1.1381, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.004891173688248932, |
| "grad_norm": 11.767483711242676, |
| "learning_rate": 1.999980537431697e-07, |
| "loss": 1.1293, |
| "step": 5310 |
| }, |
| { |
| "epoch": 0.00490038493813264, |
| "grad_norm": 12.1432466506958, |
| "learning_rate": 1.9999804470136778e-07, |
| "loss": 1.1243, |
| "step": 5320 |
| }, |
| { |
| "epoch": 0.004909596188016348, |
| "grad_norm": 12.382589340209961, |
| "learning_rate": 1.9999803563861174e-07, |
| "loss": 1.1021, |
| "step": 5330 |
| }, |
| { |
| "epoch": 0.004918807437900056, |
| "grad_norm": 14.19037914276123, |
| "learning_rate": 1.9999802655490166e-07, |
| "loss": 1.1279, |
| "step": 5340 |
| }, |
| { |
| "epoch": 0.004928018687783764, |
| "grad_norm": 13.007542610168457, |
| "learning_rate": 1.999980174502375e-07, |
| "loss": 1.2021, |
| "step": 5350 |
| }, |
| { |
| "epoch": 0.004937229937667472, |
| "grad_norm": 17.356603622436523, |
| "learning_rate": 1.9999800832461923e-07, |
| "loss": 1.1367, |
| "step": 5360 |
| }, |
| { |
| "epoch": 0.00494644118755118, |
| "grad_norm": 12.019670486450195, |
| "learning_rate": 1.9999799917804695e-07, |
| "loss": 1.1387, |
| "step": 5370 |
| }, |
| { |
| "epoch": 0.004955652437434888, |
| "grad_norm": 11.480986595153809, |
| "learning_rate": 1.9999799001052055e-07, |
| "loss": 1.1, |
| "step": 5380 |
| }, |
| { |
| "epoch": 0.004964863687318596, |
| "grad_norm": 12.47780704498291, |
| "learning_rate": 1.9999798082204012e-07, |
| "loss": 1.1591, |
| "step": 5390 |
| }, |
| { |
| "epoch": 0.004974074937202304, |
| "grad_norm": 11.818765640258789, |
| "learning_rate": 1.9999797161260564e-07, |
| "loss": 1.1327, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.004983286187086012, |
| "grad_norm": 18.358598709106445, |
| "learning_rate": 1.9999796238221706e-07, |
| "loss": 1.0953, |
| "step": 5410 |
| }, |
| { |
| "epoch": 0.00499249743696972, |
| "grad_norm": 16.343196868896484, |
| "learning_rate": 1.9999795313087443e-07, |
| "loss": 1.1561, |
| "step": 5420 |
| }, |
| { |
| "epoch": 0.005001708686853428, |
| "grad_norm": 13.27428913116455, |
| "learning_rate": 1.9999794385857776e-07, |
| "loss": 1.1744, |
| "step": 5430 |
| }, |
| { |
| "epoch": 0.005010919936737136, |
| "grad_norm": 11.715804100036621, |
| "learning_rate": 1.9999793456532703e-07, |
| "loss": 1.1809, |
| "step": 5440 |
| }, |
| { |
| "epoch": 0.0050201311866208435, |
| "grad_norm": 14.150225639343262, |
| "learning_rate": 1.9999792525112225e-07, |
| "loss": 1.1096, |
| "step": 5450 |
| }, |
| { |
| "epoch": 0.005029342436504552, |
| "grad_norm": 11.448081016540527, |
| "learning_rate": 1.999979159159634e-07, |
| "loss": 1.1226, |
| "step": 5460 |
| }, |
| { |
| "epoch": 0.00503855368638826, |
| "grad_norm": 14.43429183959961, |
| "learning_rate": 1.9999790655985055e-07, |
| "loss": 1.1901, |
| "step": 5470 |
| }, |
| { |
| "epoch": 0.005047764936271968, |
| "grad_norm": 15.267404556274414, |
| "learning_rate": 1.9999789718278358e-07, |
| "loss": 1.2112, |
| "step": 5480 |
| }, |
| { |
| "epoch": 0.005056976186155675, |
| "grad_norm": 10.746675491333008, |
| "learning_rate": 1.999978877847626e-07, |
| "loss": 1.2013, |
| "step": 5490 |
| }, |
| { |
| "epoch": 0.005066187436039383, |
| "grad_norm": 12.495003700256348, |
| "learning_rate": 1.9999787836578757e-07, |
| "loss": 1.1167, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.005075398685923092, |
| "grad_norm": 12.662280082702637, |
| "learning_rate": 1.9999786892585853e-07, |
| "loss": 1.1384, |
| "step": 5510 |
| }, |
| { |
| "epoch": 0.0050846099358068, |
| "grad_norm": 12.443802833557129, |
| "learning_rate": 1.9999785946497543e-07, |
| "loss": 1.0913, |
| "step": 5520 |
| }, |
| { |
| "epoch": 0.005093821185690507, |
| "grad_norm": 11.457616806030273, |
| "learning_rate": 1.9999784998313827e-07, |
| "loss": 1.1445, |
| "step": 5530 |
| }, |
| { |
| "epoch": 0.005103032435574215, |
| "grad_norm": 11.51938247680664, |
| "learning_rate": 1.9999784048034708e-07, |
| "loss": 1.1694, |
| "step": 5540 |
| }, |
| { |
| "epoch": 0.005112243685457924, |
| "grad_norm": 13.588903427124023, |
| "learning_rate": 1.9999783095660188e-07, |
| "loss": 1.1794, |
| "step": 5550 |
| }, |
| { |
| "epoch": 0.0051214549353416315, |
| "grad_norm": 14.660185813903809, |
| "learning_rate": 1.9999782141190265e-07, |
| "loss": 1.0849, |
| "step": 5560 |
| }, |
| { |
| "epoch": 0.005130666185225339, |
| "grad_norm": 20.53721809387207, |
| "learning_rate": 1.9999781184624935e-07, |
| "loss": 1.1461, |
| "step": 5570 |
| }, |
| { |
| "epoch": 0.005139877435109047, |
| "grad_norm": 16.216266632080078, |
| "learning_rate": 1.9999780225964208e-07, |
| "loss": 1.1413, |
| "step": 5580 |
| }, |
| { |
| "epoch": 0.005149088684992756, |
| "grad_norm": 19.000761032104492, |
| "learning_rate": 1.9999779265208075e-07, |
| "loss": 1.1266, |
| "step": 5590 |
| }, |
| { |
| "epoch": 0.0051582999348764634, |
| "grad_norm": 11.136652946472168, |
| "learning_rate": 1.9999778302356538e-07, |
| "loss": 1.1386, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.005167511184760171, |
| "grad_norm": 12.58549690246582, |
| "learning_rate": 1.9999777337409603e-07, |
| "loss": 1.1601, |
| "step": 5610 |
| }, |
| { |
| "epoch": 0.005176722434643879, |
| "grad_norm": 12.056522369384766, |
| "learning_rate": 1.9999776370367262e-07, |
| "loss": 1.135, |
| "step": 5620 |
| }, |
| { |
| "epoch": 0.005185933684527588, |
| "grad_norm": 15.700850486755371, |
| "learning_rate": 1.999977540122952e-07, |
| "loss": 1.165, |
| "step": 5630 |
| }, |
| { |
| "epoch": 0.005195144934411295, |
| "grad_norm": 14.397699356079102, |
| "learning_rate": 1.999977442999638e-07, |
| "loss": 1.115, |
| "step": 5640 |
| }, |
| { |
| "epoch": 0.005204356184295003, |
| "grad_norm": 11.212671279907227, |
| "learning_rate": 1.9999773456667833e-07, |
| "loss": 1.1227, |
| "step": 5650 |
| }, |
| { |
| "epoch": 0.005213567434178711, |
| "grad_norm": 11.996686935424805, |
| "learning_rate": 1.999977248124389e-07, |
| "loss": 1.1014, |
| "step": 5660 |
| }, |
| { |
| "epoch": 0.0052227786840624195, |
| "grad_norm": 19.226478576660156, |
| "learning_rate": 1.9999771503724546e-07, |
| "loss": 1.1689, |
| "step": 5670 |
| }, |
| { |
| "epoch": 0.005231989933946127, |
| "grad_norm": 11.640504837036133, |
| "learning_rate": 1.9999770524109798e-07, |
| "loss": 1.1692, |
| "step": 5680 |
| }, |
| { |
| "epoch": 0.005241201183829835, |
| "grad_norm": 16.938703536987305, |
| "learning_rate": 1.9999769542399652e-07, |
| "loss": 1.1036, |
| "step": 5690 |
| }, |
| { |
| "epoch": 0.005250412433713543, |
| "grad_norm": 13.067319869995117, |
| "learning_rate": 1.9999768558594106e-07, |
| "loss": 1.1106, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.005259623683597251, |
| "grad_norm": 12.755809783935547, |
| "learning_rate": 1.999976757269316e-07, |
| "loss": 1.1357, |
| "step": 5710 |
| }, |
| { |
| "epoch": 0.005268834933480959, |
| "grad_norm": 12.585805892944336, |
| "learning_rate": 1.9999766584696811e-07, |
| "loss": 1.1561, |
| "step": 5720 |
| }, |
| { |
| "epoch": 0.005278046183364667, |
| "grad_norm": 11.118009567260742, |
| "learning_rate": 1.9999765594605063e-07, |
| "loss": 1.1003, |
| "step": 5730 |
| }, |
| { |
| "epoch": 0.005287257433248375, |
| "grad_norm": 12.356595993041992, |
| "learning_rate": 1.9999764602417914e-07, |
| "loss": 1.1313, |
| "step": 5740 |
| }, |
| { |
| "epoch": 0.0052964686831320825, |
| "grad_norm": 11.358268737792969, |
| "learning_rate": 1.999976360813537e-07, |
| "loss": 1.1411, |
| "step": 5750 |
| }, |
| { |
| "epoch": 0.005305679933015791, |
| "grad_norm": 13.17751693725586, |
| "learning_rate": 1.9999762611757424e-07, |
| "loss": 1.1205, |
| "step": 5760 |
| }, |
| { |
| "epoch": 0.005314891182899499, |
| "grad_norm": 13.820402145385742, |
| "learning_rate": 1.999976161328408e-07, |
| "loss": 1.1583, |
| "step": 5770 |
| }, |
| { |
| "epoch": 0.005324102432783207, |
| "grad_norm": 13.978683471679688, |
| "learning_rate": 1.999976061271534e-07, |
| "loss": 1.09, |
| "step": 5780 |
| }, |
| { |
| "epoch": 0.005333313682666914, |
| "grad_norm": 11.78137493133545, |
| "learning_rate": 1.9999759610051198e-07, |
| "loss": 1.1539, |
| "step": 5790 |
| }, |
| { |
| "epoch": 0.005342524932550623, |
| "grad_norm": 11.485994338989258, |
| "learning_rate": 1.9999758605291658e-07, |
| "loss": 1.0971, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.005351736182434331, |
| "grad_norm": 12.865667343139648, |
| "learning_rate": 1.9999757598436725e-07, |
| "loss": 1.1186, |
| "step": 5810 |
| }, |
| { |
| "epoch": 0.005360947432318039, |
| "grad_norm": 12.713491439819336, |
| "learning_rate": 1.9999756589486386e-07, |
| "loss": 1.0808, |
| "step": 5820 |
| }, |
| { |
| "epoch": 0.005370158682201746, |
| "grad_norm": 14.523711204528809, |
| "learning_rate": 1.9999755578440655e-07, |
| "loss": 1.1207, |
| "step": 5830 |
| }, |
| { |
| "epoch": 0.005379369932085455, |
| "grad_norm": 41.16556167602539, |
| "learning_rate": 1.9999754565299525e-07, |
| "loss": 1.1197, |
| "step": 5840 |
| }, |
| { |
| "epoch": 0.005388581181969163, |
| "grad_norm": 11.416475296020508, |
| "learning_rate": 1.9999753550062998e-07, |
| "loss": 1.1272, |
| "step": 5850 |
| }, |
| { |
| "epoch": 0.0053977924318528705, |
| "grad_norm": 16.418251037597656, |
| "learning_rate": 1.9999752532731072e-07, |
| "loss": 1.1313, |
| "step": 5860 |
| }, |
| { |
| "epoch": 0.005407003681736578, |
| "grad_norm": 11.807670593261719, |
| "learning_rate": 1.9999751513303754e-07, |
| "loss": 1.1046, |
| "step": 5870 |
| }, |
| { |
| "epoch": 0.005416214931620287, |
| "grad_norm": 12.594863891601562, |
| "learning_rate": 1.9999750491781035e-07, |
| "loss": 1.1504, |
| "step": 5880 |
| }, |
| { |
| "epoch": 0.005425426181503995, |
| "grad_norm": 11.818333625793457, |
| "learning_rate": 1.9999749468162923e-07, |
| "loss": 1.1875, |
| "step": 5890 |
| }, |
| { |
| "epoch": 0.0054346374313877024, |
| "grad_norm": 11.927218437194824, |
| "learning_rate": 1.9999748442449413e-07, |
| "loss": 1.1123, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.00544384868127141, |
| "grad_norm": 11.163846015930176, |
| "learning_rate": 1.9999747414640505e-07, |
| "loss": 1.1468, |
| "step": 5910 |
| }, |
| { |
| "epoch": 0.005453059931155118, |
| "grad_norm": 16.610689163208008, |
| "learning_rate": 1.9999746384736205e-07, |
| "loss": 1.1479, |
| "step": 5920 |
| }, |
| { |
| "epoch": 0.005462271181038827, |
| "grad_norm": 13.242773056030273, |
| "learning_rate": 1.999974535273651e-07, |
| "loss": 1.1412, |
| "step": 5930 |
| }, |
| { |
| "epoch": 0.005471482430922534, |
| "grad_norm": 12.669397354125977, |
| "learning_rate": 1.9999744318641417e-07, |
| "loss": 1.1685, |
| "step": 5940 |
| }, |
| { |
| "epoch": 0.005480693680806242, |
| "grad_norm": 16.57560157775879, |
| "learning_rate": 1.999974328245093e-07, |
| "loss": 1.1746, |
| "step": 5950 |
| }, |
| { |
| "epoch": 0.00548990493068995, |
| "grad_norm": 16.463863372802734, |
| "learning_rate": 1.9999742244165048e-07, |
| "loss": 1.1507, |
| "step": 5960 |
| }, |
| { |
| "epoch": 0.0054991161805736585, |
| "grad_norm": 13.99154281616211, |
| "learning_rate": 1.999974120378377e-07, |
| "loss": 1.1031, |
| "step": 5970 |
| }, |
| { |
| "epoch": 0.005508327430457366, |
| "grad_norm": 11.488706588745117, |
| "learning_rate": 1.99997401613071e-07, |
| "loss": 1.1156, |
| "step": 5980 |
| }, |
| { |
| "epoch": 0.005517538680341074, |
| "grad_norm": 12.002022743225098, |
| "learning_rate": 1.9999739116735034e-07, |
| "loss": 1.1265, |
| "step": 5990 |
| }, |
| { |
| "epoch": 0.005526749930224782, |
| "grad_norm": 10.4370756149292, |
| "learning_rate": 1.9999738070067575e-07, |
| "loss": 1.1237, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.0055359611801084905, |
| "grad_norm": 13.410931587219238, |
| "learning_rate": 1.9999737021304724e-07, |
| "loss": 1.1786, |
| "step": 6010 |
| }, |
| { |
| "epoch": 0.005545172429992198, |
| "grad_norm": 11.271903038024902, |
| "learning_rate": 1.9999735970446477e-07, |
| "loss": 1.097, |
| "step": 6020 |
| }, |
| { |
| "epoch": 0.005554383679875906, |
| "grad_norm": 11.350632667541504, |
| "learning_rate": 1.9999734917492838e-07, |
| "loss": 1.1565, |
| "step": 6030 |
| }, |
| { |
| "epoch": 0.005563594929759614, |
| "grad_norm": 15.094610214233398, |
| "learning_rate": 1.9999733862443806e-07, |
| "loss": 1.1317, |
| "step": 6040 |
| }, |
| { |
| "epoch": 0.005572806179643322, |
| "grad_norm": 10.750134468078613, |
| "learning_rate": 1.999973280529938e-07, |
| "loss": 1.1238, |
| "step": 6050 |
| }, |
| { |
| "epoch": 0.00558201742952703, |
| "grad_norm": 12.563965797424316, |
| "learning_rate": 1.9999731746059563e-07, |
| "loss": 1.1446, |
| "step": 6060 |
| }, |
| { |
| "epoch": 0.005591228679410738, |
| "grad_norm": 11.362217903137207, |
| "learning_rate": 1.9999730684724353e-07, |
| "loss": 1.1446, |
| "step": 6070 |
| }, |
| { |
| "epoch": 0.005600439929294446, |
| "grad_norm": 10.743291854858398, |
| "learning_rate": 1.999972962129375e-07, |
| "loss": 1.1523, |
| "step": 6080 |
| }, |
| { |
| "epoch": 0.0056096511791781534, |
| "grad_norm": 11.920190811157227, |
| "learning_rate": 1.9999728555767756e-07, |
| "loss": 1.1334, |
| "step": 6090 |
| }, |
| { |
| "epoch": 0.005618862429061862, |
| "grad_norm": 12.776474952697754, |
| "learning_rate": 1.999972748814637e-07, |
| "loss": 1.1126, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.00562807367894557, |
| "grad_norm": 11.819724082946777, |
| "learning_rate": 1.999972641842959e-07, |
| "loss": 1.1438, |
| "step": 6110 |
| }, |
| { |
| "epoch": 0.005637284928829278, |
| "grad_norm": 10.685623168945312, |
| "learning_rate": 1.9999725346617422e-07, |
| "loss": 1.1286, |
| "step": 6120 |
| }, |
| { |
| "epoch": 0.005646496178712985, |
| "grad_norm": 11.812180519104004, |
| "learning_rate": 1.999972427270986e-07, |
| "loss": 1.175, |
| "step": 6130 |
| }, |
| { |
| "epoch": 0.005655707428596694, |
| "grad_norm": 16.078460693359375, |
| "learning_rate": 1.9999723196706911e-07, |
| "loss": 1.1622, |
| "step": 6140 |
| }, |
| { |
| "epoch": 0.005664918678480402, |
| "grad_norm": 13.375402450561523, |
| "learning_rate": 1.999972211860857e-07, |
| "loss": 1.1607, |
| "step": 6150 |
| }, |
| { |
| "epoch": 0.0056741299283641095, |
| "grad_norm": 11.824726104736328, |
| "learning_rate": 1.9999721038414838e-07, |
| "loss": 1.089, |
| "step": 6160 |
| }, |
| { |
| "epoch": 0.005683341178247817, |
| "grad_norm": 12.499044418334961, |
| "learning_rate": 1.9999719956125715e-07, |
| "loss": 1.0954, |
| "step": 6170 |
| }, |
| { |
| "epoch": 0.005692552428131526, |
| "grad_norm": 13.754608154296875, |
| "learning_rate": 1.9999718871741203e-07, |
| "loss": 1.1411, |
| "step": 6180 |
| }, |
| { |
| "epoch": 0.005701763678015234, |
| "grad_norm": 13.273269653320312, |
| "learning_rate": 1.99997177852613e-07, |
| "loss": 1.124, |
| "step": 6190 |
| }, |
| { |
| "epoch": 0.0057109749278989414, |
| "grad_norm": 13.18433952331543, |
| "learning_rate": 1.999971669668601e-07, |
| "loss": 1.1543, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.005720186177782649, |
| "grad_norm": 12.198955535888672, |
| "learning_rate": 1.999971560601533e-07, |
| "loss": 1.105, |
| "step": 6210 |
| }, |
| { |
| "epoch": 0.005729397427666358, |
| "grad_norm": 12.773154258728027, |
| "learning_rate": 1.999971451324926e-07, |
| "loss": 1.1522, |
| "step": 6220 |
| }, |
| { |
| "epoch": 0.005738608677550066, |
| "grad_norm": 12.310616493225098, |
| "learning_rate": 1.99997134183878e-07, |
| "loss": 1.1574, |
| "step": 6230 |
| }, |
| { |
| "epoch": 0.005747819927433773, |
| "grad_norm": 17.403564453125, |
| "learning_rate": 1.9999712321430956e-07, |
| "loss": 1.1212, |
| "step": 6240 |
| }, |
| { |
| "epoch": 0.005757031177317481, |
| "grad_norm": 18.106422424316406, |
| "learning_rate": 1.999971122237872e-07, |
| "loss": 1.124, |
| "step": 6250 |
| }, |
| { |
| "epoch": 0.00576624242720119, |
| "grad_norm": 12.24715518951416, |
| "learning_rate": 1.99997101212311e-07, |
| "loss": 1.0458, |
| "step": 6260 |
| }, |
| { |
| "epoch": 0.0057754536770848975, |
| "grad_norm": 10.603114128112793, |
| "learning_rate": 1.9999709017988086e-07, |
| "loss": 1.1699, |
| "step": 6270 |
| }, |
| { |
| "epoch": 0.005784664926968605, |
| "grad_norm": 12.13444709777832, |
| "learning_rate": 1.999970791264969e-07, |
| "loss": 1.106, |
| "step": 6280 |
| }, |
| { |
| "epoch": 0.005793876176852313, |
| "grad_norm": 12.899703979492188, |
| "learning_rate": 1.9999706805215904e-07, |
| "loss": 1.1461, |
| "step": 6290 |
| }, |
| { |
| "epoch": 0.005803087426736021, |
| "grad_norm": 14.141440391540527, |
| "learning_rate": 1.999970569568673e-07, |
| "loss": 1.1208, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.0058122986766197295, |
| "grad_norm": 11.572757720947266, |
| "learning_rate": 1.999970458406217e-07, |
| "loss": 1.0977, |
| "step": 6310 |
| }, |
| { |
| "epoch": 0.005821509926503437, |
| "grad_norm": 26.14940071105957, |
| "learning_rate": 1.9999703470342225e-07, |
| "loss": 1.1397, |
| "step": 6320 |
| }, |
| { |
| "epoch": 0.005830721176387145, |
| "grad_norm": 10.017374038696289, |
| "learning_rate": 1.999970235452689e-07, |
| "loss": 1.1535, |
| "step": 6330 |
| }, |
| { |
| "epoch": 0.005839932426270853, |
| "grad_norm": 11.262249946594238, |
| "learning_rate": 1.999970123661617e-07, |
| "loss": 1.1098, |
| "step": 6340 |
| }, |
| { |
| "epoch": 0.005849143676154561, |
| "grad_norm": 11.205881118774414, |
| "learning_rate": 1.9999700116610066e-07, |
| "loss": 1.0703, |
| "step": 6350 |
| }, |
| { |
| "epoch": 0.005858354926038269, |
| "grad_norm": 13.461429595947266, |
| "learning_rate": 1.9999698994508575e-07, |
| "loss": 1.0914, |
| "step": 6360 |
| }, |
| { |
| "epoch": 0.005867566175921977, |
| "grad_norm": 12.487695693969727, |
| "learning_rate": 1.9999697870311695e-07, |
| "loss": 1.0687, |
| "step": 6370 |
| }, |
| { |
| "epoch": 0.005876777425805685, |
| "grad_norm": 10.686612129211426, |
| "learning_rate": 1.9999696744019434e-07, |
| "loss": 1.0794, |
| "step": 6380 |
| }, |
| { |
| "epoch": 0.005885988675689393, |
| "grad_norm": 11.470562934875488, |
| "learning_rate": 1.9999695615631785e-07, |
| "loss": 1.1186, |
| "step": 6390 |
| }, |
| { |
| "epoch": 0.005895199925573101, |
| "grad_norm": 10.777716636657715, |
| "learning_rate": 1.9999694485148754e-07, |
| "loss": 1.0816, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.005904411175456809, |
| "grad_norm": 14.13252067565918, |
| "learning_rate": 1.9999693352570336e-07, |
| "loss": 1.1586, |
| "step": 6410 |
| }, |
| { |
| "epoch": 0.005913622425340517, |
| "grad_norm": 12.646770477294922, |
| "learning_rate": 1.9999692217896538e-07, |
| "loss": 1.1052, |
| "step": 6420 |
| }, |
| { |
| "epoch": 0.005922833675224225, |
| "grad_norm": 12.746360778808594, |
| "learning_rate": 1.9999691081127352e-07, |
| "loss": 1.128, |
| "step": 6430 |
| }, |
| { |
| "epoch": 0.005932044925107933, |
| "grad_norm": 13.182230949401855, |
| "learning_rate": 1.9999689942262782e-07, |
| "loss": 1.1635, |
| "step": 6440 |
| }, |
| { |
| "epoch": 0.005941256174991641, |
| "grad_norm": 12.712933540344238, |
| "learning_rate": 1.999968880130283e-07, |
| "loss": 1.0786, |
| "step": 6450 |
| }, |
| { |
| "epoch": 0.0059504674248753485, |
| "grad_norm": 17.82648277282715, |
| "learning_rate": 1.9999687658247493e-07, |
| "loss": 1.1121, |
| "step": 6460 |
| }, |
| { |
| "epoch": 0.005959678674759057, |
| "grad_norm": 10.265266418457031, |
| "learning_rate": 1.9999686513096776e-07, |
| "loss": 1.136, |
| "step": 6470 |
| }, |
| { |
| "epoch": 0.005968889924642765, |
| "grad_norm": 11.721809387207031, |
| "learning_rate": 1.9999685365850674e-07, |
| "loss": 1.104, |
| "step": 6480 |
| }, |
| { |
| "epoch": 0.005978101174526473, |
| "grad_norm": 10.478239059448242, |
| "learning_rate": 1.999968421650919e-07, |
| "loss": 1.0841, |
| "step": 6490 |
| }, |
| { |
| "epoch": 0.0059873124244101805, |
| "grad_norm": 15.492977142333984, |
| "learning_rate": 1.9999683065072322e-07, |
| "loss": 1.1335, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.005996523674293888, |
| "grad_norm": 13.186904907226562, |
| "learning_rate": 1.9999681911540071e-07, |
| "loss": 1.0739, |
| "step": 6510 |
| }, |
| { |
| "epoch": 0.006005734924177597, |
| "grad_norm": 16.937009811401367, |
| "learning_rate": 1.999968075591244e-07, |
| "loss": 1.1586, |
| "step": 6520 |
| }, |
| { |
| "epoch": 0.006014946174061305, |
| "grad_norm": 11.617034912109375, |
| "learning_rate": 1.9999679598189426e-07, |
| "loss": 1.1257, |
| "step": 6530 |
| }, |
| { |
| "epoch": 0.006024157423945012, |
| "grad_norm": 12.224775314331055, |
| "learning_rate": 1.9999678438371032e-07, |
| "loss": 1.087, |
| "step": 6540 |
| }, |
| { |
| "epoch": 0.00603336867382872, |
| "grad_norm": 10.866127967834473, |
| "learning_rate": 1.9999677276457255e-07, |
| "loss": 1.1095, |
| "step": 6550 |
| }, |
| { |
| "epoch": 0.006042579923712429, |
| "grad_norm": 13.364578247070312, |
| "learning_rate": 1.9999676112448101e-07, |
| "loss": 1.1337, |
| "step": 6560 |
| }, |
| { |
| "epoch": 0.0060517911735961365, |
| "grad_norm": 10.99776840209961, |
| "learning_rate": 1.9999674946343563e-07, |
| "loss": 1.1329, |
| "step": 6570 |
| }, |
| { |
| "epoch": 0.006061002423479844, |
| "grad_norm": 12.15756893157959, |
| "learning_rate": 1.9999673778143645e-07, |
| "loss": 1.1407, |
| "step": 6580 |
| }, |
| { |
| "epoch": 0.006070213673363552, |
| "grad_norm": 11.56879711151123, |
| "learning_rate": 1.9999672607848348e-07, |
| "loss": 1.1423, |
| "step": 6590 |
| }, |
| { |
| "epoch": 0.006079424923247261, |
| "grad_norm": 10.917329788208008, |
| "learning_rate": 1.999967143545767e-07, |
| "loss": 1.0308, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.0060886361731309685, |
| "grad_norm": 11.400591850280762, |
| "learning_rate": 1.9999670260971612e-07, |
| "loss": 1.1156, |
| "step": 6610 |
| }, |
| { |
| "epoch": 0.006097847423014676, |
| "grad_norm": 22.441970825195312, |
| "learning_rate": 1.9999669084390173e-07, |
| "loss": 1.0721, |
| "step": 6620 |
| }, |
| { |
| "epoch": 0.006107058672898384, |
| "grad_norm": 12.816572189331055, |
| "learning_rate": 1.9999667905713355e-07, |
| "loss": 1.0678, |
| "step": 6630 |
| }, |
| { |
| "epoch": 0.006116269922782093, |
| "grad_norm": 10.841035842895508, |
| "learning_rate": 1.999966672494116e-07, |
| "loss": 1.1213, |
| "step": 6640 |
| }, |
| { |
| "epoch": 0.0061254811726658, |
| "grad_norm": 12.051841735839844, |
| "learning_rate": 1.9999665542073588e-07, |
| "loss": 1.1462, |
| "step": 6650 |
| }, |
| { |
| "epoch": 0.006134692422549508, |
| "grad_norm": 10.909770965576172, |
| "learning_rate": 1.9999664357110632e-07, |
| "loss": 1.1359, |
| "step": 6660 |
| }, |
| { |
| "epoch": 0.006143903672433216, |
| "grad_norm": 11.57640552520752, |
| "learning_rate": 1.99996631700523e-07, |
| "loss": 1.1223, |
| "step": 6670 |
| }, |
| { |
| "epoch": 0.006153114922316924, |
| "grad_norm": 12.645811080932617, |
| "learning_rate": 1.9999661980898593e-07, |
| "loss": 1.1088, |
| "step": 6680 |
| }, |
| { |
| "epoch": 0.006162326172200632, |
| "grad_norm": 10.874650001525879, |
| "learning_rate": 1.9999660789649506e-07, |
| "loss": 1.0978, |
| "step": 6690 |
| }, |
| { |
| "epoch": 0.00617153742208434, |
| "grad_norm": 10.974211692810059, |
| "learning_rate": 1.9999659596305042e-07, |
| "loss": 1.0982, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.006180748671968048, |
| "grad_norm": 44.23649215698242, |
| "learning_rate": 1.9999658400865198e-07, |
| "loss": 1.0941, |
| "step": 6710 |
| }, |
| { |
| "epoch": 0.006189959921851756, |
| "grad_norm": 21.356443405151367, |
| "learning_rate": 1.999965720332998e-07, |
| "loss": 1.1536, |
| "step": 6720 |
| }, |
| { |
| "epoch": 0.006199171171735464, |
| "grad_norm": 11.17716121673584, |
| "learning_rate": 1.9999656003699385e-07, |
| "loss": 1.0988, |
| "step": 6730 |
| }, |
| { |
| "epoch": 0.006208382421619172, |
| "grad_norm": 12.178362846374512, |
| "learning_rate": 1.999965480197341e-07, |
| "loss": 1.1405, |
| "step": 6740 |
| }, |
| { |
| "epoch": 0.00621759367150288, |
| "grad_norm": 12.388544082641602, |
| "learning_rate": 1.9999653598152065e-07, |
| "loss": 1.0938, |
| "step": 6750 |
| }, |
| { |
| "epoch": 0.0062268049213865875, |
| "grad_norm": 12.290316581726074, |
| "learning_rate": 1.9999652392235337e-07, |
| "loss": 1.1116, |
| "step": 6760 |
| }, |
| { |
| "epoch": 0.006236016171270296, |
| "grad_norm": 13.840502738952637, |
| "learning_rate": 1.9999651184223238e-07, |
| "loss": 1.1459, |
| "step": 6770 |
| }, |
| { |
| "epoch": 0.006245227421154004, |
| "grad_norm": 10.720335006713867, |
| "learning_rate": 1.999964997411576e-07, |
| "loss": 1.1422, |
| "step": 6780 |
| }, |
| { |
| "epoch": 0.006254438671037712, |
| "grad_norm": 11.053603172302246, |
| "learning_rate": 1.9999648761912913e-07, |
| "loss": 1.0698, |
| "step": 6790 |
| }, |
| { |
| "epoch": 0.0062636499209214195, |
| "grad_norm": 12.491558074951172, |
| "learning_rate": 1.9999647547614685e-07, |
| "loss": 1.1304, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.006272861170805128, |
| "grad_norm": 10.658504486083984, |
| "learning_rate": 1.9999646331221084e-07, |
| "loss": 1.1854, |
| "step": 6810 |
| }, |
| { |
| "epoch": 0.006282072420688836, |
| "grad_norm": 12.681370735168457, |
| "learning_rate": 1.9999645112732108e-07, |
| "loss": 1.0653, |
| "step": 6820 |
| }, |
| { |
| "epoch": 0.006291283670572544, |
| "grad_norm": 12.169018745422363, |
| "learning_rate": 1.999964389214776e-07, |
| "loss": 1.1184, |
| "step": 6830 |
| }, |
| { |
| "epoch": 0.006300494920456251, |
| "grad_norm": 11.624190330505371, |
| "learning_rate": 1.9999642669468036e-07, |
| "loss": 1.1222, |
| "step": 6840 |
| }, |
| { |
| "epoch": 0.00630970617033996, |
| "grad_norm": 10.29429817199707, |
| "learning_rate": 1.999964144469294e-07, |
| "loss": 1.119, |
| "step": 6850 |
| }, |
| { |
| "epoch": 0.006318917420223668, |
| "grad_norm": 12.683826446533203, |
| "learning_rate": 1.999964021782247e-07, |
| "loss": 1.0833, |
| "step": 6860 |
| }, |
| { |
| "epoch": 0.0063281286701073755, |
| "grad_norm": 20.82172203063965, |
| "learning_rate": 1.9999638988856625e-07, |
| "loss": 1.1231, |
| "step": 6870 |
| }, |
| { |
| "epoch": 0.006337339919991083, |
| "grad_norm": 12.441207885742188, |
| "learning_rate": 1.9999637757795406e-07, |
| "loss": 1.0643, |
| "step": 6880 |
| }, |
| { |
| "epoch": 0.006346551169874791, |
| "grad_norm": 13.605744361877441, |
| "learning_rate": 1.9999636524638816e-07, |
| "loss": 1.1002, |
| "step": 6890 |
| }, |
| { |
| "epoch": 0.0063557624197585, |
| "grad_norm": 12.78536605834961, |
| "learning_rate": 1.9999635289386854e-07, |
| "loss": 1.0842, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.0063649736696422075, |
| "grad_norm": 12.114583969116211, |
| "learning_rate": 1.9999634052039523e-07, |
| "loss": 1.1061, |
| "step": 6910 |
| }, |
| { |
| "epoch": 0.006374184919525915, |
| "grad_norm": 10.829866409301758, |
| "learning_rate": 1.9999632812596813e-07, |
| "loss": 1.1945, |
| "step": 6920 |
| }, |
| { |
| "epoch": 0.006383396169409623, |
| "grad_norm": 11.205638885498047, |
| "learning_rate": 1.9999631571058737e-07, |
| "loss": 1.1449, |
| "step": 6930 |
| }, |
| { |
| "epoch": 0.006392607419293332, |
| "grad_norm": 13.620341300964355, |
| "learning_rate": 1.9999630327425288e-07, |
| "loss": 1.0994, |
| "step": 6940 |
| }, |
| { |
| "epoch": 0.006401818669177039, |
| "grad_norm": 11.192842483520508, |
| "learning_rate": 1.9999629081696469e-07, |
| "loss": 1.1509, |
| "step": 6950 |
| }, |
| { |
| "epoch": 0.006411029919060747, |
| "grad_norm": 10.575221061706543, |
| "learning_rate": 1.9999627833872275e-07, |
| "loss": 1.1303, |
| "step": 6960 |
| }, |
| { |
| "epoch": 0.006420241168944455, |
| "grad_norm": 11.54958724975586, |
| "learning_rate": 1.9999626583952714e-07, |
| "loss": 1.0861, |
| "step": 6970 |
| }, |
| { |
| "epoch": 0.0064294524188281636, |
| "grad_norm": 10.372851371765137, |
| "learning_rate": 1.9999625331937783e-07, |
| "loss": 1.0703, |
| "step": 6980 |
| }, |
| { |
| "epoch": 0.006438663668711871, |
| "grad_norm": 11.92662525177002, |
| "learning_rate": 1.999962407782748e-07, |
| "loss": 1.0787, |
| "step": 6990 |
| }, |
| { |
| "epoch": 0.006447874918595579, |
| "grad_norm": 12.739754676818848, |
| "learning_rate": 1.9999622821621807e-07, |
| "loss": 1.1289, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.006457086168479287, |
| "grad_norm": 13.717080116271973, |
| "learning_rate": 1.9999621563320766e-07, |
| "loss": 1.1083, |
| "step": 7010 |
| }, |
| { |
| "epoch": 0.0064662974183629955, |
| "grad_norm": 10.639162063598633, |
| "learning_rate": 1.9999620302924353e-07, |
| "loss": 1.1297, |
| "step": 7020 |
| }, |
| { |
| "epoch": 0.006475508668246703, |
| "grad_norm": 32.09090805053711, |
| "learning_rate": 1.9999619040432574e-07, |
| "loss": 1.1298, |
| "step": 7030 |
| }, |
| { |
| "epoch": 0.006484719918130411, |
| "grad_norm": 12.339664459228516, |
| "learning_rate": 1.9999617775845423e-07, |
| "loss": 1.1063, |
| "step": 7040 |
| }, |
| { |
| "epoch": 0.006493931168014119, |
| "grad_norm": 11.175090789794922, |
| "learning_rate": 1.999961650916291e-07, |
| "loss": 1.1119, |
| "step": 7050 |
| }, |
| { |
| "epoch": 0.006503142417897827, |
| "grad_norm": 11.646284103393555, |
| "learning_rate": 1.999961524038502e-07, |
| "loss": 1.0748, |
| "step": 7060 |
| }, |
| { |
| "epoch": 0.006512353667781535, |
| "grad_norm": 12.031477928161621, |
| "learning_rate": 1.9999613969511768e-07, |
| "loss": 1.123, |
| "step": 7070 |
| }, |
| { |
| "epoch": 0.006521564917665243, |
| "grad_norm": 10.209918022155762, |
| "learning_rate": 1.9999612696543147e-07, |
| "loss": 1.1093, |
| "step": 7080 |
| }, |
| { |
| "epoch": 0.006530776167548951, |
| "grad_norm": 10.993101119995117, |
| "learning_rate": 1.999961142147916e-07, |
| "loss": 1.1427, |
| "step": 7090 |
| }, |
| { |
| "epoch": 0.0065399874174326585, |
| "grad_norm": 11.72935676574707, |
| "learning_rate": 1.9999610144319803e-07, |
| "loss": 1.1147, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.006549198667316367, |
| "grad_norm": 47.72971725463867, |
| "learning_rate": 1.9999608865065078e-07, |
| "loss": 1.1203, |
| "step": 7110 |
| }, |
| { |
| "epoch": 0.006558409917200075, |
| "grad_norm": 11.274374961853027, |
| "learning_rate": 1.999960758371499e-07, |
| "loss": 1.1324, |
| "step": 7120 |
| }, |
| { |
| "epoch": 0.006567621167083783, |
| "grad_norm": 12.471686363220215, |
| "learning_rate": 1.9999606300269534e-07, |
| "loss": 1.1431, |
| "step": 7130 |
| }, |
| { |
| "epoch": 0.00657683241696749, |
| "grad_norm": 12.744390487670898, |
| "learning_rate": 1.999960501472871e-07, |
| "loss": 1.1391, |
| "step": 7140 |
| }, |
| { |
| "epoch": 0.006586043666851199, |
| "grad_norm": 12.682497024536133, |
| "learning_rate": 1.9999603727092525e-07, |
| "loss": 1.0761, |
| "step": 7150 |
| }, |
| { |
| "epoch": 0.006595254916734907, |
| "grad_norm": 12.342095375061035, |
| "learning_rate": 1.9999602437360968e-07, |
| "loss": 1.0855, |
| "step": 7160 |
| }, |
| { |
| "epoch": 0.0066044661666186145, |
| "grad_norm": 11.812524795532227, |
| "learning_rate": 1.999960114553405e-07, |
| "loss": 1.0633, |
| "step": 7170 |
| }, |
| { |
| "epoch": 0.006613677416502322, |
| "grad_norm": 12.257493019104004, |
| "learning_rate": 1.9999599851611767e-07, |
| "loss": 1.0702, |
| "step": 7180 |
| }, |
| { |
| "epoch": 0.006622888666386031, |
| "grad_norm": 15.87657356262207, |
| "learning_rate": 1.9999598555594117e-07, |
| "loss": 1.0892, |
| "step": 7190 |
| }, |
| { |
| "epoch": 0.006632099916269739, |
| "grad_norm": 12.35704231262207, |
| "learning_rate": 1.9999597257481105e-07, |
| "loss": 1.1333, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.0066413111661534465, |
| "grad_norm": 10.380608558654785, |
| "learning_rate": 1.9999595957272728e-07, |
| "loss": 1.0983, |
| "step": 7210 |
| }, |
| { |
| "epoch": 0.006650522416037154, |
| "grad_norm": 11.658407211303711, |
| "learning_rate": 1.9999594654968987e-07, |
| "loss": 1.0846, |
| "step": 7220 |
| }, |
| { |
| "epoch": 0.006659733665920863, |
| "grad_norm": 11.98071002960205, |
| "learning_rate": 1.9999593350569882e-07, |
| "loss": 1.155, |
| "step": 7230 |
| }, |
| { |
| "epoch": 0.006668944915804571, |
| "grad_norm": 11.475347518920898, |
| "learning_rate": 1.9999592044075416e-07, |
| "loss": 1.1128, |
| "step": 7240 |
| }, |
| { |
| "epoch": 0.006678156165688278, |
| "grad_norm": 11.06085205078125, |
| "learning_rate": 1.9999590735485584e-07, |
| "loss": 1.1584, |
| "step": 7250 |
| }, |
| { |
| "epoch": 0.006687367415571986, |
| "grad_norm": 11.058730125427246, |
| "learning_rate": 1.9999589424800386e-07, |
| "loss": 1.0768, |
| "step": 7260 |
| }, |
| { |
| "epoch": 0.006696578665455694, |
| "grad_norm": 10.531845092773438, |
| "learning_rate": 1.9999588112019832e-07, |
| "loss": 1.0659, |
| "step": 7270 |
| }, |
| { |
| "epoch": 0.0067057899153394026, |
| "grad_norm": 10.828505516052246, |
| "learning_rate": 1.9999586797143912e-07, |
| "loss": 1.074, |
| "step": 7280 |
| }, |
| { |
| "epoch": 0.00671500116522311, |
| "grad_norm": 10.694981575012207, |
| "learning_rate": 1.999958548017263e-07, |
| "loss": 1.1178, |
| "step": 7290 |
| }, |
| { |
| "epoch": 0.006724212415106818, |
| "grad_norm": 10.756375312805176, |
| "learning_rate": 1.9999584161105988e-07, |
| "loss": 1.1042, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.006733423664990526, |
| "grad_norm": 10.571388244628906, |
| "learning_rate": 1.9999582839943985e-07, |
| "loss": 1.1143, |
| "step": 7310 |
| }, |
| { |
| "epoch": 0.0067426349148742345, |
| "grad_norm": 12.96018123626709, |
| "learning_rate": 1.9999581516686618e-07, |
| "loss": 1.0548, |
| "step": 7320 |
| }, |
| { |
| "epoch": 0.006751846164757942, |
| "grad_norm": 11.269163131713867, |
| "learning_rate": 1.9999580191333893e-07, |
| "loss": 1.1127, |
| "step": 7330 |
| }, |
| { |
| "epoch": 0.00676105741464165, |
| "grad_norm": 13.715166091918945, |
| "learning_rate": 1.9999578863885804e-07, |
| "loss": 1.0901, |
| "step": 7340 |
| }, |
| { |
| "epoch": 0.006770268664525358, |
| "grad_norm": 12.425786018371582, |
| "learning_rate": 1.9999577534342357e-07, |
| "loss": 1.1019, |
| "step": 7350 |
| }, |
| { |
| "epoch": 0.006779479914409066, |
| "grad_norm": 11.684393882751465, |
| "learning_rate": 1.999957620270355e-07, |
| "loss": 1.1158, |
| "step": 7360 |
| }, |
| { |
| "epoch": 0.006788691164292774, |
| "grad_norm": 12.401206970214844, |
| "learning_rate": 1.9999574868969382e-07, |
| "loss": 1.1013, |
| "step": 7370 |
| }, |
| { |
| "epoch": 0.006797902414176482, |
| "grad_norm": 10.5696382522583, |
| "learning_rate": 1.9999573533139857e-07, |
| "loss": 1.1485, |
| "step": 7380 |
| }, |
| { |
| "epoch": 0.00680711366406019, |
| "grad_norm": 11.542669296264648, |
| "learning_rate": 1.9999572195214972e-07, |
| "loss": 1.0899, |
| "step": 7390 |
| }, |
| { |
| "epoch": 0.006816324913943898, |
| "grad_norm": 14.461751937866211, |
| "learning_rate": 1.9999570855194725e-07, |
| "loss": 1.0843, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.006825536163827606, |
| "grad_norm": 16.850862503051758, |
| "learning_rate": 1.999956951307912e-07, |
| "loss": 1.1273, |
| "step": 7410 |
| }, |
| { |
| "epoch": 0.006834747413711314, |
| "grad_norm": 14.927706718444824, |
| "learning_rate": 1.999956816886816e-07, |
| "loss": 1.1438, |
| "step": 7420 |
| }, |
| { |
| "epoch": 0.006843958663595022, |
| "grad_norm": 12.482978820800781, |
| "learning_rate": 1.999956682256184e-07, |
| "loss": 1.0591, |
| "step": 7430 |
| }, |
| { |
| "epoch": 0.00685316991347873, |
| "grad_norm": 11.566182136535645, |
| "learning_rate": 1.999956547416016e-07, |
| "loss": 1.1053, |
| "step": 7440 |
| }, |
| { |
| "epoch": 0.006862381163362438, |
| "grad_norm": 12.080305099487305, |
| "learning_rate": 1.9999564123663126e-07, |
| "loss": 1.079, |
| "step": 7450 |
| }, |
| { |
| "epoch": 0.006871592413246146, |
| "grad_norm": 13.21672248840332, |
| "learning_rate": 1.9999562771070732e-07, |
| "loss": 1.094, |
| "step": 7460 |
| }, |
| { |
| "epoch": 0.0068808036631298536, |
| "grad_norm": 12.294853210449219, |
| "learning_rate": 1.9999561416382982e-07, |
| "loss": 1.1221, |
| "step": 7470 |
| }, |
| { |
| "epoch": 0.006890014913013561, |
| "grad_norm": 11.741070747375488, |
| "learning_rate": 1.9999560059599876e-07, |
| "loss": 1.0761, |
| "step": 7480 |
| }, |
| { |
| "epoch": 0.00689922616289727, |
| "grad_norm": 11.063822746276855, |
| "learning_rate": 1.9999558700721412e-07, |
| "loss": 1.0759, |
| "step": 7490 |
| }, |
| { |
| "epoch": 0.006908437412780978, |
| "grad_norm": 11.67596435546875, |
| "learning_rate": 1.9999557339747593e-07, |
| "loss": 1.1265, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.0069176486626646855, |
| "grad_norm": 11.140481948852539, |
| "learning_rate": 1.9999555976678417e-07, |
| "loss": 1.144, |
| "step": 7510 |
| }, |
| { |
| "epoch": 0.006926859912548393, |
| "grad_norm": 11.030956268310547, |
| "learning_rate": 1.9999554611513886e-07, |
| "loss": 1.0425, |
| "step": 7520 |
| }, |
| { |
| "epoch": 0.006936071162432102, |
| "grad_norm": 9.714004516601562, |
| "learning_rate": 1.9999553244254002e-07, |
| "loss": 1.085, |
| "step": 7530 |
| }, |
| { |
| "epoch": 0.00694528241231581, |
| "grad_norm": 11.757089614868164, |
| "learning_rate": 1.999955187489876e-07, |
| "loss": 1.0947, |
| "step": 7540 |
| }, |
| { |
| "epoch": 0.006954493662199517, |
| "grad_norm": 12.053640365600586, |
| "learning_rate": 1.9999550503448167e-07, |
| "loss": 1.0629, |
| "step": 7550 |
| }, |
| { |
| "epoch": 0.006963704912083225, |
| "grad_norm": 13.974733352661133, |
| "learning_rate": 1.9999549129902213e-07, |
| "loss": 1.1113, |
| "step": 7560 |
| }, |
| { |
| "epoch": 0.006972916161966934, |
| "grad_norm": 12.889135360717773, |
| "learning_rate": 1.9999547754260912e-07, |
| "loss": 1.1195, |
| "step": 7570 |
| }, |
| { |
| "epoch": 0.0069821274118506416, |
| "grad_norm": 12.478116989135742, |
| "learning_rate": 1.9999546376524252e-07, |
| "loss": 1.0413, |
| "step": 7580 |
| }, |
| { |
| "epoch": 0.006991338661734349, |
| "grad_norm": 11.465994834899902, |
| "learning_rate": 1.9999544996692241e-07, |
| "loss": 1.0734, |
| "step": 7590 |
| }, |
| { |
| "epoch": 0.007000549911618057, |
| "grad_norm": 11.014551162719727, |
| "learning_rate": 1.9999543614764875e-07, |
| "loss": 1.1125, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.007009761161501766, |
| "grad_norm": 12.119184494018555, |
| "learning_rate": 1.999954223074216e-07, |
| "loss": 1.0905, |
| "step": 7610 |
| }, |
| { |
| "epoch": 0.0070189724113854735, |
| "grad_norm": 10.168764114379883, |
| "learning_rate": 1.9999540844624087e-07, |
| "loss": 1.0995, |
| "step": 7620 |
| }, |
| { |
| "epoch": 0.007028183661269181, |
| "grad_norm": 11.991799354553223, |
| "learning_rate": 1.9999539456410664e-07, |
| "loss": 1.0964, |
| "step": 7630 |
| }, |
| { |
| "epoch": 0.007037394911152889, |
| "grad_norm": 23.04439926147461, |
| "learning_rate": 1.9999538066101891e-07, |
| "loss": 1.0672, |
| "step": 7640 |
| }, |
| { |
| "epoch": 0.007046606161036598, |
| "grad_norm": 14.078396797180176, |
| "learning_rate": 1.9999536673697763e-07, |
| "loss": 1.1382, |
| "step": 7650 |
| }, |
| { |
| "epoch": 0.007055817410920305, |
| "grad_norm": 12.15142822265625, |
| "learning_rate": 1.9999535279198286e-07, |
| "loss": 1.0827, |
| "step": 7660 |
| }, |
| { |
| "epoch": 0.007065028660804013, |
| "grad_norm": 10.530996322631836, |
| "learning_rate": 1.9999533882603457e-07, |
| "loss": 1.0511, |
| "step": 7670 |
| }, |
| { |
| "epoch": 0.007074239910687721, |
| "grad_norm": 12.080910682678223, |
| "learning_rate": 1.9999532483913275e-07, |
| "loss": 1.0814, |
| "step": 7680 |
| }, |
| { |
| "epoch": 0.007083451160571429, |
| "grad_norm": 12.011896133422852, |
| "learning_rate": 1.9999531083127744e-07, |
| "loss": 1.085, |
| "step": 7690 |
| }, |
| { |
| "epoch": 0.007092662410455137, |
| "grad_norm": 10.504772186279297, |
| "learning_rate": 1.9999529680246864e-07, |
| "loss": 1.0837, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.007101873660338845, |
| "grad_norm": 13.050121307373047, |
| "learning_rate": 1.9999528275270633e-07, |
| "loss": 1.0736, |
| "step": 7710 |
| }, |
| { |
| "epoch": 0.007111084910222553, |
| "grad_norm": 14.976795196533203, |
| "learning_rate": 1.9999526868199051e-07, |
| "loss": 1.1461, |
| "step": 7720 |
| }, |
| { |
| "epoch": 0.007120296160106261, |
| "grad_norm": 10.797948837280273, |
| "learning_rate": 1.999952545903212e-07, |
| "loss": 1.0825, |
| "step": 7730 |
| }, |
| { |
| "epoch": 0.007129507409989969, |
| "grad_norm": 13.095974922180176, |
| "learning_rate": 1.999952404776984e-07, |
| "loss": 1.0808, |
| "step": 7740 |
| }, |
| { |
| "epoch": 0.007138718659873677, |
| "grad_norm": 12.81965446472168, |
| "learning_rate": 1.999952263441221e-07, |
| "loss": 1.0817, |
| "step": 7750 |
| }, |
| { |
| "epoch": 0.007147929909757385, |
| "grad_norm": 10.503836631774902, |
| "learning_rate": 1.9999521218959235e-07, |
| "loss": 1.0907, |
| "step": 7760 |
| }, |
| { |
| "epoch": 0.0071571411596410926, |
| "grad_norm": 13.46060848236084, |
| "learning_rate": 1.999951980141091e-07, |
| "loss": 1.1431, |
| "step": 7770 |
| }, |
| { |
| "epoch": 0.007166352409524801, |
| "grad_norm": 13.242213249206543, |
| "learning_rate": 1.9999518381767236e-07, |
| "loss": 1.103, |
| "step": 7780 |
| }, |
| { |
| "epoch": 0.007175563659408509, |
| "grad_norm": 12.271145820617676, |
| "learning_rate": 1.9999516960028213e-07, |
| "loss": 1.0824, |
| "step": 7790 |
| }, |
| { |
| "epoch": 0.007184774909292217, |
| "grad_norm": 10.907269477844238, |
| "learning_rate": 1.9999515536193844e-07, |
| "loss": 1.038, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.0071939861591759245, |
| "grad_norm": 11.432321548461914, |
| "learning_rate": 1.999951411026413e-07, |
| "loss": 1.1254, |
| "step": 7810 |
| }, |
| { |
| "epoch": 0.007203197409059633, |
| "grad_norm": 11.535138130187988, |
| "learning_rate": 1.9999512682239069e-07, |
| "loss": 1.1016, |
| "step": 7820 |
| }, |
| { |
| "epoch": 0.007212408658943341, |
| "grad_norm": 11.701823234558105, |
| "learning_rate": 1.999951125211866e-07, |
| "loss": 1.13, |
| "step": 7830 |
| }, |
| { |
| "epoch": 0.007221619908827049, |
| "grad_norm": 11.544978141784668, |
| "learning_rate": 1.9999509819902902e-07, |
| "loss": 1.0998, |
| "step": 7840 |
| }, |
| { |
| "epoch": 0.007230831158710756, |
| "grad_norm": 11.483750343322754, |
| "learning_rate": 1.99995083855918e-07, |
| "loss": 1.106, |
| "step": 7850 |
| }, |
| { |
| "epoch": 0.007240042408594464, |
| "grad_norm": 10.794761657714844, |
| "learning_rate": 1.9999506949185355e-07, |
| "loss": 1.0879, |
| "step": 7860 |
| }, |
| { |
| "epoch": 0.007249253658478173, |
| "grad_norm": 11.811905860900879, |
| "learning_rate": 1.9999505510683563e-07, |
| "loss": 1.0963, |
| "step": 7870 |
| }, |
| { |
| "epoch": 0.007258464908361881, |
| "grad_norm": 12.071663856506348, |
| "learning_rate": 1.9999504070086425e-07, |
| "loss": 1.0676, |
| "step": 7880 |
| }, |
| { |
| "epoch": 0.007267676158245588, |
| "grad_norm": 13.593671798706055, |
| "learning_rate": 1.9999502627393943e-07, |
| "loss": 1.0624, |
| "step": 7890 |
| }, |
| { |
| "epoch": 0.007276887408129296, |
| "grad_norm": 10.687907218933105, |
| "learning_rate": 1.9999501182606118e-07, |
| "loss": 1.0664, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.007286098658013005, |
| "grad_norm": 10.393074989318848, |
| "learning_rate": 1.9999499735722948e-07, |
| "loss": 1.0776, |
| "step": 7910 |
| }, |
| { |
| "epoch": 0.0072953099078967125, |
| "grad_norm": 10.972018241882324, |
| "learning_rate": 1.9999498286744435e-07, |
| "loss": 1.1301, |
| "step": 7920 |
| }, |
| { |
| "epoch": 0.00730452115778042, |
| "grad_norm": 10.537809371948242, |
| "learning_rate": 1.9999496835670575e-07, |
| "loss": 1.0516, |
| "step": 7930 |
| }, |
| { |
| "epoch": 0.007313732407664128, |
| "grad_norm": 12.946764945983887, |
| "learning_rate": 1.9999495382501377e-07, |
| "loss": 1.0802, |
| "step": 7940 |
| }, |
| { |
| "epoch": 0.007322943657547837, |
| "grad_norm": 12.875717163085938, |
| "learning_rate": 1.9999493927236832e-07, |
| "loss": 1.1128, |
| "step": 7950 |
| }, |
| { |
| "epoch": 0.007332154907431544, |
| "grad_norm": 11.909735679626465, |
| "learning_rate": 1.9999492469876947e-07, |
| "loss": 1.1428, |
| "step": 7960 |
| }, |
| { |
| "epoch": 0.007341366157315252, |
| "grad_norm": 11.238419532775879, |
| "learning_rate": 1.999949101042172e-07, |
| "loss": 1.1089, |
| "step": 7970 |
| }, |
| { |
| "epoch": 0.00735057740719896, |
| "grad_norm": 10.957053184509277, |
| "learning_rate": 1.999948954887115e-07, |
| "loss": 1.0781, |
| "step": 7980 |
| }, |
| { |
| "epoch": 0.007359788657082669, |
| "grad_norm": 11.11838436126709, |
| "learning_rate": 1.999948808522524e-07, |
| "loss": 1.0834, |
| "step": 7990 |
| }, |
| { |
| "epoch": 0.007368999906966376, |
| "grad_norm": 11.233697891235352, |
| "learning_rate": 1.9999486619483984e-07, |
| "loss": 1.1382, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.007378211156850084, |
| "grad_norm": 11.929357528686523, |
| "learning_rate": 1.999948515164739e-07, |
| "loss": 1.0773, |
| "step": 8010 |
| }, |
| { |
| "epoch": 0.007387422406733792, |
| "grad_norm": 13.492103576660156, |
| "learning_rate": 1.9999483681715455e-07, |
| "loss": 1.0893, |
| "step": 8020 |
| }, |
| { |
| "epoch": 0.0073966336566175005, |
| "grad_norm": 13.893610954284668, |
| "learning_rate": 1.9999482209688178e-07, |
| "loss": 1.0863, |
| "step": 8030 |
| }, |
| { |
| "epoch": 0.007405844906501208, |
| "grad_norm": 12.548665046691895, |
| "learning_rate": 1.9999480735565563e-07, |
| "loss": 1.1644, |
| "step": 8040 |
| }, |
| { |
| "epoch": 0.007415056156384916, |
| "grad_norm": 10.275520324707031, |
| "learning_rate": 1.999947925934761e-07, |
| "loss": 1.1013, |
| "step": 8050 |
| }, |
| { |
| "epoch": 0.007424267406268624, |
| "grad_norm": 18.808349609375, |
| "learning_rate": 1.9999477781034313e-07, |
| "loss": 1.041, |
| "step": 8060 |
| }, |
| { |
| "epoch": 0.0074334786561523316, |
| "grad_norm": 11.54647159576416, |
| "learning_rate": 1.999947630062568e-07, |
| "loss": 1.124, |
| "step": 8070 |
| }, |
| { |
| "epoch": 0.00744268990603604, |
| "grad_norm": 11.929420471191406, |
| "learning_rate": 1.9999474818121706e-07, |
| "loss": 1.0929, |
| "step": 8080 |
| }, |
| { |
| "epoch": 0.007451901155919748, |
| "grad_norm": 11.030402183532715, |
| "learning_rate": 1.9999473333522395e-07, |
| "loss": 1.1091, |
| "step": 8090 |
| }, |
| { |
| "epoch": 0.007461112405803456, |
| "grad_norm": 10.330012321472168, |
| "learning_rate": 1.9999471846827745e-07, |
| "loss": 1.0553, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.0074703236556871635, |
| "grad_norm": 10.526143074035645, |
| "learning_rate": 1.9999470358037757e-07, |
| "loss": 1.0578, |
| "step": 8110 |
| }, |
| { |
| "epoch": 0.007479534905570872, |
| "grad_norm": 12.601168632507324, |
| "learning_rate": 1.9999468867152432e-07, |
| "loss": 1.1277, |
| "step": 8120 |
| }, |
| { |
| "epoch": 0.00748874615545458, |
| "grad_norm": 10.997840881347656, |
| "learning_rate": 1.999946737417177e-07, |
| "loss": 1.0973, |
| "step": 8130 |
| }, |
| { |
| "epoch": 0.007497957405338288, |
| "grad_norm": 10.950722694396973, |
| "learning_rate": 1.999946587909577e-07, |
| "loss": 1.1411, |
| "step": 8140 |
| }, |
| { |
| "epoch": 0.007507168655221995, |
| "grad_norm": 11.305554389953613, |
| "learning_rate": 1.9999464381924435e-07, |
| "loss": 1.1277, |
| "step": 8150 |
| }, |
| { |
| "epoch": 0.007516379905105704, |
| "grad_norm": 11.543803215026855, |
| "learning_rate": 1.9999462882657762e-07, |
| "loss": 1.0786, |
| "step": 8160 |
| }, |
| { |
| "epoch": 0.007525591154989412, |
| "grad_norm": 10.588018417358398, |
| "learning_rate": 1.9999461381295754e-07, |
| "loss": 1.0556, |
| "step": 8170 |
| }, |
| { |
| "epoch": 0.00753480240487312, |
| "grad_norm": 11.241497039794922, |
| "learning_rate": 1.9999459877838409e-07, |
| "loss": 1.0868, |
| "step": 8180 |
| }, |
| { |
| "epoch": 0.007544013654756827, |
| "grad_norm": 12.166845321655273, |
| "learning_rate": 1.999945837228573e-07, |
| "loss": 1.1032, |
| "step": 8190 |
| }, |
| { |
| "epoch": 0.007553224904640536, |
| "grad_norm": 12.816760063171387, |
| "learning_rate": 1.9999456864637715e-07, |
| "loss": 1.0838, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.007562436154524244, |
| "grad_norm": 11.980488777160645, |
| "learning_rate": 1.9999455354894366e-07, |
| "loss": 1.0591, |
| "step": 8210 |
| }, |
| { |
| "epoch": 0.0075716474044079515, |
| "grad_norm": 12.990999221801758, |
| "learning_rate": 1.999945384305568e-07, |
| "loss": 1.1435, |
| "step": 8220 |
| }, |
| { |
| "epoch": 0.007580858654291659, |
| "grad_norm": 11.558279037475586, |
| "learning_rate": 1.9999452329121663e-07, |
| "loss": 1.144, |
| "step": 8230 |
| }, |
| { |
| "epoch": 0.007590069904175368, |
| "grad_norm": 10.841493606567383, |
| "learning_rate": 1.999945081309231e-07, |
| "loss": 1.0816, |
| "step": 8240 |
| }, |
| { |
| "epoch": 0.007599281154059076, |
| "grad_norm": 39.162574768066406, |
| "learning_rate": 1.9999449294967626e-07, |
| "loss": 1.1183, |
| "step": 8250 |
| }, |
| { |
| "epoch": 0.007608492403942783, |
| "grad_norm": 10.853628158569336, |
| "learning_rate": 1.9999447774747608e-07, |
| "loss": 1.0929, |
| "step": 8260 |
| }, |
| { |
| "epoch": 0.007617703653826491, |
| "grad_norm": 10.833484649658203, |
| "learning_rate": 1.9999446252432252e-07, |
| "loss": 1.0915, |
| "step": 8270 |
| }, |
| { |
| "epoch": 0.007626914903710199, |
| "grad_norm": 11.263511657714844, |
| "learning_rate": 1.999944472802157e-07, |
| "loss": 1.125, |
| "step": 8280 |
| }, |
| { |
| "epoch": 0.007636126153593908, |
| "grad_norm": 11.13198471069336, |
| "learning_rate": 1.9999443201515553e-07, |
| "loss": 1.1051, |
| "step": 8290 |
| }, |
| { |
| "epoch": 0.007645337403477615, |
| "grad_norm": 11.556886672973633, |
| "learning_rate": 1.9999441672914204e-07, |
| "loss": 1.0732, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.007654548653361323, |
| "grad_norm": 27.989442825317383, |
| "learning_rate": 1.9999440142217523e-07, |
| "loss": 1.1284, |
| "step": 8310 |
| }, |
| { |
| "epoch": 0.007663759903245031, |
| "grad_norm": 13.482211112976074, |
| "learning_rate": 1.9999438609425513e-07, |
| "loss": 1.0953, |
| "step": 8320 |
| }, |
| { |
| "epoch": 0.0076729711531287395, |
| "grad_norm": 11.242138862609863, |
| "learning_rate": 1.9999437074538172e-07, |
| "loss": 1.0905, |
| "step": 8330 |
| }, |
| { |
| "epoch": 0.007682182403012447, |
| "grad_norm": 11.400861740112305, |
| "learning_rate": 1.9999435537555498e-07, |
| "loss": 1.0815, |
| "step": 8340 |
| }, |
| { |
| "epoch": 0.007691393652896155, |
| "grad_norm": 10.671701431274414, |
| "learning_rate": 1.9999433998477495e-07, |
| "loss": 1.096, |
| "step": 8350 |
| }, |
| { |
| "epoch": 0.007700604902779863, |
| "grad_norm": 9.893141746520996, |
| "learning_rate": 1.999943245730416e-07, |
| "loss": 1.0586, |
| "step": 8360 |
| }, |
| { |
| "epoch": 0.007709816152663571, |
| "grad_norm": 12.050461769104004, |
| "learning_rate": 1.9999430914035497e-07, |
| "loss": 1.0821, |
| "step": 8370 |
| }, |
| { |
| "epoch": 0.007719027402547279, |
| "grad_norm": 17.062572479248047, |
| "learning_rate": 1.9999429368671506e-07, |
| "loss": 1.0947, |
| "step": 8380 |
| }, |
| { |
| "epoch": 0.007728238652430987, |
| "grad_norm": 10.553415298461914, |
| "learning_rate": 1.999942782121218e-07, |
| "loss": 1.0534, |
| "step": 8390 |
| }, |
| { |
| "epoch": 0.007737449902314695, |
| "grad_norm": 11.457592964172363, |
| "learning_rate": 1.9999426271657532e-07, |
| "loss": 1.0932, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.007746661152198403, |
| "grad_norm": 12.018735885620117, |
| "learning_rate": 1.9999424720007557e-07, |
| "loss": 1.0894, |
| "step": 8410 |
| }, |
| { |
| "epoch": 0.007755872402082111, |
| "grad_norm": 10.82856273651123, |
| "learning_rate": 1.999942316626225e-07, |
| "loss": 1.0928, |
| "step": 8420 |
| }, |
| { |
| "epoch": 0.007765083651965819, |
| "grad_norm": 12.359776496887207, |
| "learning_rate": 1.9999421610421613e-07, |
| "loss": 1.1049, |
| "step": 8430 |
| }, |
| { |
| "epoch": 0.007774294901849527, |
| "grad_norm": 11.891792297363281, |
| "learning_rate": 1.9999420052485655e-07, |
| "loss": 1.0607, |
| "step": 8440 |
| }, |
| { |
| "epoch": 0.007783506151733234, |
| "grad_norm": 11.3504638671875, |
| "learning_rate": 1.9999418492454363e-07, |
| "loss": 1.0824, |
| "step": 8450 |
| }, |
| { |
| "epoch": 0.007792717401616943, |
| "grad_norm": 11.335362434387207, |
| "learning_rate": 1.999941693032775e-07, |
| "loss": 1.1133, |
| "step": 8460 |
| }, |
| { |
| "epoch": 0.007801928651500651, |
| "grad_norm": 11.447676658630371, |
| "learning_rate": 1.9999415366105806e-07, |
| "loss": 1.0605, |
| "step": 8470 |
| }, |
| { |
| "epoch": 0.007811139901384359, |
| "grad_norm": 12.361418724060059, |
| "learning_rate": 1.999941379978854e-07, |
| "loss": 1.0673, |
| "step": 8480 |
| }, |
| { |
| "epoch": 0.007820351151268067, |
| "grad_norm": 11.360335350036621, |
| "learning_rate": 1.9999412231375943e-07, |
| "loss": 1.0663, |
| "step": 8490 |
| }, |
| { |
| "epoch": 0.007829562401151775, |
| "grad_norm": 11.409963607788086, |
| "learning_rate": 1.9999410660868023e-07, |
| "loss": 1.0989, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.007838773651035483, |
| "grad_norm": 10.656011581420898, |
| "learning_rate": 1.999940908826478e-07, |
| "loss": 1.1032, |
| "step": 8510 |
| }, |
| { |
| "epoch": 0.00784798490091919, |
| "grad_norm": 11.441688537597656, |
| "learning_rate": 1.999940751356621e-07, |
| "loss": 1.0405, |
| "step": 8520 |
| }, |
| { |
| "epoch": 0.007857196150802898, |
| "grad_norm": 11.79773998260498, |
| "learning_rate": 1.9999405936772317e-07, |
| "loss": 1.0775, |
| "step": 8530 |
| }, |
| { |
| "epoch": 0.007866407400686606, |
| "grad_norm": 11.805946350097656, |
| "learning_rate": 1.9999404357883098e-07, |
| "loss": 1.0834, |
| "step": 8540 |
| }, |
| { |
| "epoch": 0.007875618650570314, |
| "grad_norm": 13.176393508911133, |
| "learning_rate": 1.999940277689856e-07, |
| "loss": 1.0532, |
| "step": 8550 |
| }, |
| { |
| "epoch": 0.007884829900454023, |
| "grad_norm": 10.132299423217773, |
| "learning_rate": 1.9999401193818692e-07, |
| "loss": 1.084, |
| "step": 8560 |
| }, |
| { |
| "epoch": 0.007894041150337731, |
| "grad_norm": 12.019964218139648, |
| "learning_rate": 1.9999399608643505e-07, |
| "loss": 1.05, |
| "step": 8570 |
| }, |
| { |
| "epoch": 0.007903252400221439, |
| "grad_norm": 12.41136646270752, |
| "learning_rate": 1.9999398021372996e-07, |
| "loss": 1.1125, |
| "step": 8580 |
| }, |
| { |
| "epoch": 0.007912463650105147, |
| "grad_norm": 11.242287635803223, |
| "learning_rate": 1.999939643200716e-07, |
| "loss": 1.0843, |
| "step": 8590 |
| }, |
| { |
| "epoch": 0.007921674899988854, |
| "grad_norm": 11.359539985656738, |
| "learning_rate": 1.9999394840546004e-07, |
| "loss": 1.0703, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.007930886149872562, |
| "grad_norm": 11.71211051940918, |
| "learning_rate": 1.999939324698953e-07, |
| "loss": 1.0691, |
| "step": 8610 |
| }, |
| { |
| "epoch": 0.00794009739975627, |
| "grad_norm": 12.759934425354004, |
| "learning_rate": 1.9999391651337727e-07, |
| "loss": 1.0849, |
| "step": 8620 |
| }, |
| { |
| "epoch": 0.007949308649639978, |
| "grad_norm": 14.097381591796875, |
| "learning_rate": 1.999939005359061e-07, |
| "loss": 1.055, |
| "step": 8630 |
| }, |
| { |
| "epoch": 0.007958519899523685, |
| "grad_norm": 17.369016647338867, |
| "learning_rate": 1.9999388453748167e-07, |
| "loss": 1.0619, |
| "step": 8640 |
| }, |
| { |
| "epoch": 0.007967731149407395, |
| "grad_norm": 12.619816780090332, |
| "learning_rate": 1.9999386851810407e-07, |
| "loss": 1.0528, |
| "step": 8650 |
| }, |
| { |
| "epoch": 0.007976942399291103, |
| "grad_norm": 11.299243927001953, |
| "learning_rate": 1.9999385247777325e-07, |
| "loss": 1.0256, |
| "step": 8660 |
| }, |
| { |
| "epoch": 0.00798615364917481, |
| "grad_norm": 11.641725540161133, |
| "learning_rate": 1.9999383641648925e-07, |
| "loss": 1.066, |
| "step": 8670 |
| }, |
| { |
| "epoch": 0.007995364899058518, |
| "grad_norm": 12.779623985290527, |
| "learning_rate": 1.9999382033425203e-07, |
| "loss": 1.0743, |
| "step": 8680 |
| }, |
| { |
| "epoch": 0.008004576148942226, |
| "grad_norm": 12.839458465576172, |
| "learning_rate": 1.9999380423106166e-07, |
| "loss": 1.0949, |
| "step": 8690 |
| }, |
| { |
| "epoch": 0.008013787398825934, |
| "grad_norm": 11.303932189941406, |
| "learning_rate": 1.9999378810691804e-07, |
| "loss": 1.133, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.008022998648709641, |
| "grad_norm": 10.806841850280762, |
| "learning_rate": 1.999937719618213e-07, |
| "loss": 1.0998, |
| "step": 8710 |
| }, |
| { |
| "epoch": 0.00803220989859335, |
| "grad_norm": 11.759305953979492, |
| "learning_rate": 1.9999375579577133e-07, |
| "loss": 1.0742, |
| "step": 8720 |
| }, |
| { |
| "epoch": 0.008041421148477059, |
| "grad_norm": 11.412139892578125, |
| "learning_rate": 1.999937396087682e-07, |
| "loss": 1.0659, |
| "step": 8730 |
| }, |
| { |
| "epoch": 0.008050632398360767, |
| "grad_norm": 18.34227752685547, |
| "learning_rate": 1.999937234008119e-07, |
| "loss": 1.0684, |
| "step": 8740 |
| }, |
| { |
| "epoch": 0.008059843648244474, |
| "grad_norm": 11.488326072692871, |
| "learning_rate": 1.9999370717190243e-07, |
| "loss": 1.0718, |
| "step": 8750 |
| }, |
| { |
| "epoch": 0.008069054898128182, |
| "grad_norm": 10.723801612854004, |
| "learning_rate": 1.999936909220398e-07, |
| "loss": 1.1355, |
| "step": 8760 |
| }, |
| { |
| "epoch": 0.00807826614801189, |
| "grad_norm": 11.560935974121094, |
| "learning_rate": 1.99993674651224e-07, |
| "loss": 1.0813, |
| "step": 8770 |
| }, |
| { |
| "epoch": 0.008087477397895598, |
| "grad_norm": 11.844764709472656, |
| "learning_rate": 1.9999365835945504e-07, |
| "loss": 1.101, |
| "step": 8780 |
| }, |
| { |
| "epoch": 0.008096688647779305, |
| "grad_norm": 11.374857902526855, |
| "learning_rate": 1.9999364204673288e-07, |
| "loss": 1.0962, |
| "step": 8790 |
| }, |
| { |
| "epoch": 0.008105899897663013, |
| "grad_norm": 10.651556968688965, |
| "learning_rate": 1.9999362571305761e-07, |
| "loss": 1.0634, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.008115111147546723, |
| "grad_norm": 11.097898483276367, |
| "learning_rate": 1.9999360935842919e-07, |
| "loss": 1.0626, |
| "step": 8810 |
| }, |
| { |
| "epoch": 0.00812432239743043, |
| "grad_norm": 13.289128303527832, |
| "learning_rate": 1.9999359298284763e-07, |
| "loss": 1.0994, |
| "step": 8820 |
| }, |
| { |
| "epoch": 0.008133533647314138, |
| "grad_norm": 11.332006454467773, |
| "learning_rate": 1.999935765863129e-07, |
| "loss": 1.1036, |
| "step": 8830 |
| }, |
| { |
| "epoch": 0.008142744897197846, |
| "grad_norm": 11.014249801635742, |
| "learning_rate": 1.9999356016882505e-07, |
| "loss": 1.0726, |
| "step": 8840 |
| }, |
| { |
| "epoch": 0.008151956147081554, |
| "grad_norm": 11.593423843383789, |
| "learning_rate": 1.9999354373038404e-07, |
| "loss": 1.0703, |
| "step": 8850 |
| }, |
| { |
| "epoch": 0.008161167396965261, |
| "grad_norm": 10.959020614624023, |
| "learning_rate": 1.9999352727098992e-07, |
| "loss": 1.0612, |
| "step": 8860 |
| }, |
| { |
| "epoch": 0.00817037864684897, |
| "grad_norm": 10.597652435302734, |
| "learning_rate": 1.9999351079064266e-07, |
| "loss": 1.0577, |
| "step": 8870 |
| }, |
| { |
| "epoch": 0.008179589896732677, |
| "grad_norm": 10.600388526916504, |
| "learning_rate": 1.999934942893423e-07, |
| "loss": 1.0649, |
| "step": 8880 |
| }, |
| { |
| "epoch": 0.008188801146616385, |
| "grad_norm": 9.9636812210083, |
| "learning_rate": 1.9999347776708878e-07, |
| "loss": 1.0517, |
| "step": 8890 |
| }, |
| { |
| "epoch": 0.008198012396500094, |
| "grad_norm": 10.696206092834473, |
| "learning_rate": 1.9999346122388215e-07, |
| "loss": 1.0933, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.008207223646383802, |
| "grad_norm": 11.579418182373047, |
| "learning_rate": 1.9999344465972242e-07, |
| "loss": 1.111, |
| "step": 8910 |
| }, |
| { |
| "epoch": 0.00821643489626751, |
| "grad_norm": 11.81453800201416, |
| "learning_rate": 1.9999342807460957e-07, |
| "loss": 1.0998, |
| "step": 8920 |
| }, |
| { |
| "epoch": 0.008225646146151218, |
| "grad_norm": 9.833195686340332, |
| "learning_rate": 1.9999341146854357e-07, |
| "loss": 1.0894, |
| "step": 8930 |
| }, |
| { |
| "epoch": 0.008234857396034925, |
| "grad_norm": 10.20954704284668, |
| "learning_rate": 1.9999339484152454e-07, |
| "loss": 1.0442, |
| "step": 8940 |
| }, |
| { |
| "epoch": 0.008244068645918633, |
| "grad_norm": 13.597179412841797, |
| "learning_rate": 1.9999337819355235e-07, |
| "loss": 1.0585, |
| "step": 8950 |
| }, |
| { |
| "epoch": 0.00825327989580234, |
| "grad_norm": 50.29384231567383, |
| "learning_rate": 1.9999336152462708e-07, |
| "loss": 1.1284, |
| "step": 8960 |
| }, |
| { |
| "epoch": 0.008262491145686049, |
| "grad_norm": 11.695181846618652, |
| "learning_rate": 1.9999334483474873e-07, |
| "loss": 1.1024, |
| "step": 8970 |
| }, |
| { |
| "epoch": 0.008271702395569758, |
| "grad_norm": 10.144072532653809, |
| "learning_rate": 1.9999332812391728e-07, |
| "loss": 1.0317, |
| "step": 8980 |
| }, |
| { |
| "epoch": 0.008280913645453466, |
| "grad_norm": 12.269700050354004, |
| "learning_rate": 1.9999331139213274e-07, |
| "loss": 1.084, |
| "step": 8990 |
| }, |
| { |
| "epoch": 0.008290124895337174, |
| "grad_norm": 12.896242141723633, |
| "learning_rate": 1.999932946393951e-07, |
| "loss": 1.055, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.008299336145220881, |
| "grad_norm": 12.16714096069336, |
| "learning_rate": 1.999932778657044e-07, |
| "loss": 1.0751, |
| "step": 9010 |
| }, |
| { |
| "epoch": 0.008308547395104589, |
| "grad_norm": 10.43128490447998, |
| "learning_rate": 1.9999326107106061e-07, |
| "loss": 1.1038, |
| "step": 9020 |
| }, |
| { |
| "epoch": 0.008317758644988297, |
| "grad_norm": 10.838507652282715, |
| "learning_rate": 1.9999324425546376e-07, |
| "loss": 1.0804, |
| "step": 9030 |
| }, |
| { |
| "epoch": 0.008326969894872005, |
| "grad_norm": 11.41357421875, |
| "learning_rate": 1.9999322741891381e-07, |
| "loss": 1.0692, |
| "step": 9040 |
| }, |
| { |
| "epoch": 0.008336181144755712, |
| "grad_norm": 10.528654098510742, |
| "learning_rate": 1.9999321056141082e-07, |
| "loss": 1.092, |
| "step": 9050 |
| }, |
| { |
| "epoch": 0.00834539239463942, |
| "grad_norm": 11.275156021118164, |
| "learning_rate": 1.9999319368295475e-07, |
| "loss": 1.0485, |
| "step": 9060 |
| }, |
| { |
| "epoch": 0.00835460364452313, |
| "grad_norm": 11.776030540466309, |
| "learning_rate": 1.9999317678354564e-07, |
| "loss": 1.1683, |
| "step": 9070 |
| }, |
| { |
| "epoch": 0.008363814894406837, |
| "grad_norm": 11.334040641784668, |
| "learning_rate": 1.9999315986318346e-07, |
| "loss": 1.0969, |
| "step": 9080 |
| }, |
| { |
| "epoch": 0.008373026144290545, |
| "grad_norm": 10.663966178894043, |
| "learning_rate": 1.9999314292186825e-07, |
| "loss": 1.0776, |
| "step": 9090 |
| }, |
| { |
| "epoch": 0.008382237394174253, |
| "grad_norm": 10.564156532287598, |
| "learning_rate": 1.9999312595959993e-07, |
| "loss": 1.0671, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.00839144864405796, |
| "grad_norm": 10.607227325439453, |
| "learning_rate": 1.9999310897637861e-07, |
| "loss": 1.0729, |
| "step": 9110 |
| }, |
| { |
| "epoch": 0.008400659893941669, |
| "grad_norm": 13.524459838867188, |
| "learning_rate": 1.9999309197220427e-07, |
| "loss": 1.04, |
| "step": 9120 |
| }, |
| { |
| "epoch": 0.008409871143825376, |
| "grad_norm": 9.924148559570312, |
| "learning_rate": 1.9999307494707684e-07, |
| "loss": 1.0195, |
| "step": 9130 |
| }, |
| { |
| "epoch": 0.008419082393709084, |
| "grad_norm": 11.542811393737793, |
| "learning_rate": 1.9999305790099641e-07, |
| "loss": 1.1115, |
| "step": 9140 |
| }, |
| { |
| "epoch": 0.008428293643592794, |
| "grad_norm": 13.14279556274414, |
| "learning_rate": 1.9999304083396296e-07, |
| "loss": 1.0727, |
| "step": 9150 |
| }, |
| { |
| "epoch": 0.008437504893476501, |
| "grad_norm": 11.538451194763184, |
| "learning_rate": 1.9999302374597645e-07, |
| "loss": 1.0456, |
| "step": 9160 |
| }, |
| { |
| "epoch": 0.008446716143360209, |
| "grad_norm": 11.371332168579102, |
| "learning_rate": 1.9999300663703691e-07, |
| "loss": 1.0997, |
| "step": 9170 |
| }, |
| { |
| "epoch": 0.008455927393243917, |
| "grad_norm": 10.599366188049316, |
| "learning_rate": 1.9999298950714438e-07, |
| "loss": 1.0669, |
| "step": 9180 |
| }, |
| { |
| "epoch": 0.008465138643127625, |
| "grad_norm": 147.1831817626953, |
| "learning_rate": 1.999929723562988e-07, |
| "loss": 1.089, |
| "step": 9190 |
| }, |
| { |
| "epoch": 0.008474349893011332, |
| "grad_norm": 10.78607177734375, |
| "learning_rate": 1.9999295518450025e-07, |
| "loss": 1.0246, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.00848356114289504, |
| "grad_norm": 12.30728530883789, |
| "learning_rate": 1.9999293799174865e-07, |
| "loss": 1.0562, |
| "step": 9210 |
| }, |
| { |
| "epoch": 0.008492772392778748, |
| "grad_norm": 11.619100570678711, |
| "learning_rate": 1.9999292077804406e-07, |
| "loss": 1.0656, |
| "step": 9220 |
| }, |
| { |
| "epoch": 0.008501983642662456, |
| "grad_norm": 9.434433937072754, |
| "learning_rate": 1.9999290354338646e-07, |
| "loss": 1.0588, |
| "step": 9230 |
| }, |
| { |
| "epoch": 0.008511194892546165, |
| "grad_norm": 10.201811790466309, |
| "learning_rate": 1.9999288628777586e-07, |
| "loss": 1.0893, |
| "step": 9240 |
| }, |
| { |
| "epoch": 0.008520406142429873, |
| "grad_norm": 10.75688648223877, |
| "learning_rate": 1.9999286901121232e-07, |
| "loss": 0.9994, |
| "step": 9250 |
| }, |
| { |
| "epoch": 0.00852961739231358, |
| "grad_norm": 12.294363021850586, |
| "learning_rate": 1.9999285171369572e-07, |
| "loss": 1.086, |
| "step": 9260 |
| }, |
| { |
| "epoch": 0.008538828642197288, |
| "grad_norm": 10.840588569641113, |
| "learning_rate": 1.9999283439522615e-07, |
| "loss": 1.0661, |
| "step": 9270 |
| }, |
| { |
| "epoch": 0.008548039892080996, |
| "grad_norm": 13.13080883026123, |
| "learning_rate": 1.9999281705580357e-07, |
| "loss": 1.0969, |
| "step": 9280 |
| }, |
| { |
| "epoch": 0.008557251141964704, |
| "grad_norm": 12.541131973266602, |
| "learning_rate": 1.9999279969542807e-07, |
| "loss": 1.0719, |
| "step": 9290 |
| }, |
| { |
| "epoch": 0.008566462391848412, |
| "grad_norm": 11.553109169006348, |
| "learning_rate": 1.9999278231409955e-07, |
| "loss": 1.0542, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.00857567364173212, |
| "grad_norm": 10.858327865600586, |
| "learning_rate": 1.9999276491181808e-07, |
| "loss": 1.0617, |
| "step": 9310 |
| }, |
| { |
| "epoch": 0.008584884891615829, |
| "grad_norm": 11.022598266601562, |
| "learning_rate": 1.999927474885836e-07, |
| "loss": 1.0526, |
| "step": 9320 |
| }, |
| { |
| "epoch": 0.008594096141499537, |
| "grad_norm": 11.471165657043457, |
| "learning_rate": 1.9999273004439618e-07, |
| "loss": 1.045, |
| "step": 9330 |
| }, |
| { |
| "epoch": 0.008603307391383245, |
| "grad_norm": 17.085573196411133, |
| "learning_rate": 1.999927125792558e-07, |
| "loss": 1.0653, |
| "step": 9340 |
| }, |
| { |
| "epoch": 0.008612518641266952, |
| "grad_norm": 12.661412239074707, |
| "learning_rate": 1.9999269509316247e-07, |
| "loss": 1.0932, |
| "step": 9350 |
| }, |
| { |
| "epoch": 0.00862172989115066, |
| "grad_norm": 12.632064819335938, |
| "learning_rate": 1.9999267758611618e-07, |
| "loss": 1.0962, |
| "step": 9360 |
| }, |
| { |
| "epoch": 0.008630941141034368, |
| "grad_norm": 10.689151763916016, |
| "learning_rate": 1.999926600581169e-07, |
| "loss": 1.0536, |
| "step": 9370 |
| }, |
| { |
| "epoch": 0.008640152390918076, |
| "grad_norm": 11.263710975646973, |
| "learning_rate": 1.9999264250916472e-07, |
| "loss": 1.061, |
| "step": 9380 |
| }, |
| { |
| "epoch": 0.008649363640801783, |
| "grad_norm": 10.958022117614746, |
| "learning_rate": 1.9999262493925956e-07, |
| "loss": 1.0561, |
| "step": 9390 |
| }, |
| { |
| "epoch": 0.008658574890685493, |
| "grad_norm": 12.540989875793457, |
| "learning_rate": 1.999926073484015e-07, |
| "loss": 1.0657, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.0086677861405692, |
| "grad_norm": 10.894880294799805, |
| "learning_rate": 1.9999258973659047e-07, |
| "loss": 1.0658, |
| "step": 9410 |
| }, |
| { |
| "epoch": 0.008676997390452908, |
| "grad_norm": 13.033790588378906, |
| "learning_rate": 1.999925721038265e-07, |
| "loss": 1.0419, |
| "step": 9420 |
| }, |
| { |
| "epoch": 0.008686208640336616, |
| "grad_norm": 11.394380569458008, |
| "learning_rate": 1.9999255445010964e-07, |
| "loss": 1.1079, |
| "step": 9430 |
| }, |
| { |
| "epoch": 0.008695419890220324, |
| "grad_norm": 10.95514965057373, |
| "learning_rate": 1.9999253677543984e-07, |
| "loss": 1.0424, |
| "step": 9440 |
| }, |
| { |
| "epoch": 0.008704631140104032, |
| "grad_norm": 11.140457153320312, |
| "learning_rate": 1.9999251907981712e-07, |
| "loss": 1.1059, |
| "step": 9450 |
| }, |
| { |
| "epoch": 0.00871384238998774, |
| "grad_norm": 10.803711891174316, |
| "learning_rate": 1.9999250136324145e-07, |
| "loss": 1.0246, |
| "step": 9460 |
| }, |
| { |
| "epoch": 0.008723053639871447, |
| "grad_norm": 10.29238510131836, |
| "learning_rate": 1.9999248362571289e-07, |
| "loss": 1.0822, |
| "step": 9470 |
| }, |
| { |
| "epoch": 0.008732264889755155, |
| "grad_norm": 11.502205848693848, |
| "learning_rate": 1.999924658672314e-07, |
| "loss": 1.1197, |
| "step": 9480 |
| }, |
| { |
| "epoch": 0.008741476139638864, |
| "grad_norm": 11.76965618133545, |
| "learning_rate": 1.9999244808779702e-07, |
| "loss": 1.0492, |
| "step": 9490 |
| }, |
| { |
| "epoch": 0.008750687389522572, |
| "grad_norm": 10.714384078979492, |
| "learning_rate": 1.9999243028740972e-07, |
| "loss": 1.0533, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.00875989863940628, |
| "grad_norm": 11.008478164672852, |
| "learning_rate": 1.9999241246606955e-07, |
| "loss": 1.0832, |
| "step": 9510 |
| }, |
| { |
| "epoch": 0.008769109889289988, |
| "grad_norm": 10.359647750854492, |
| "learning_rate": 1.9999239462377648e-07, |
| "loss": 1.0551, |
| "step": 9520 |
| }, |
| { |
| "epoch": 0.008778321139173696, |
| "grad_norm": 10.09439754486084, |
| "learning_rate": 1.999923767605305e-07, |
| "loss": 1.0314, |
| "step": 9530 |
| }, |
| { |
| "epoch": 0.008787532389057403, |
| "grad_norm": 12.28543472290039, |
| "learning_rate": 1.9999235887633167e-07, |
| "loss": 1.0385, |
| "step": 9540 |
| }, |
| { |
| "epoch": 0.008796743638941111, |
| "grad_norm": 11.33259391784668, |
| "learning_rate": 1.999923409711799e-07, |
| "loss": 1.0599, |
| "step": 9550 |
| }, |
| { |
| "epoch": 0.008805954888824819, |
| "grad_norm": 10.183274269104004, |
| "learning_rate": 1.9999232304507524e-07, |
| "loss": 1.0351, |
| "step": 9560 |
| }, |
| { |
| "epoch": 0.008815166138708528, |
| "grad_norm": 11.242117881774902, |
| "learning_rate": 1.9999230509801776e-07, |
| "loss": 1.1069, |
| "step": 9570 |
| }, |
| { |
| "epoch": 0.008824377388592236, |
| "grad_norm": 16.519079208374023, |
| "learning_rate": 1.9999228713000737e-07, |
| "loss": 1.0785, |
| "step": 9580 |
| }, |
| { |
| "epoch": 0.008833588638475944, |
| "grad_norm": 12.673565864562988, |
| "learning_rate": 1.9999226914104415e-07, |
| "loss": 1.0436, |
| "step": 9590 |
| }, |
| { |
| "epoch": 0.008842799888359652, |
| "grad_norm": 11.839251518249512, |
| "learning_rate": 1.99992251131128e-07, |
| "loss": 1.0878, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.00885201113824336, |
| "grad_norm": 10.931118965148926, |
| "learning_rate": 1.9999223310025903e-07, |
| "loss": 1.1125, |
| "step": 9610 |
| }, |
| { |
| "epoch": 0.008861222388127067, |
| "grad_norm": 11.007862091064453, |
| "learning_rate": 1.999922150484372e-07, |
| "loss": 1.062, |
| "step": 9620 |
| }, |
| { |
| "epoch": 0.008870433638010775, |
| "grad_norm": 10.936423301696777, |
| "learning_rate": 1.9999219697566253e-07, |
| "loss": 1.1171, |
| "step": 9630 |
| }, |
| { |
| "epoch": 0.008879644887894483, |
| "grad_norm": 12.802664756774902, |
| "learning_rate": 1.9999217888193499e-07, |
| "loss": 1.0819, |
| "step": 9640 |
| }, |
| { |
| "epoch": 0.00888885613777819, |
| "grad_norm": 13.950556755065918, |
| "learning_rate": 1.9999216076725455e-07, |
| "loss": 1.0978, |
| "step": 9650 |
| }, |
| { |
| "epoch": 0.0088980673876619, |
| "grad_norm": 13.137720108032227, |
| "learning_rate": 1.9999214263162135e-07, |
| "loss": 1.1422, |
| "step": 9660 |
| }, |
| { |
| "epoch": 0.008907278637545608, |
| "grad_norm": 12.493955612182617, |
| "learning_rate": 1.9999212447503528e-07, |
| "loss": 1.0574, |
| "step": 9670 |
| }, |
| { |
| "epoch": 0.008916489887429315, |
| "grad_norm": 12.361075401306152, |
| "learning_rate": 1.9999210629749636e-07, |
| "loss": 1.0808, |
| "step": 9680 |
| }, |
| { |
| "epoch": 0.008925701137313023, |
| "grad_norm": 10.407599449157715, |
| "learning_rate": 1.999920880990046e-07, |
| "loss": 1.0782, |
| "step": 9690 |
| }, |
| { |
| "epoch": 0.008934912387196731, |
| "grad_norm": 10.881622314453125, |
| "learning_rate": 1.9999206987956007e-07, |
| "loss": 1.0988, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.008944123637080439, |
| "grad_norm": 10.24425220489502, |
| "learning_rate": 1.9999205163916265e-07, |
| "loss": 1.0339, |
| "step": 9710 |
| }, |
| { |
| "epoch": 0.008953334886964147, |
| "grad_norm": 11.523407936096191, |
| "learning_rate": 1.9999203337781245e-07, |
| "loss": 1.0782, |
| "step": 9720 |
| }, |
| { |
| "epoch": 0.008962546136847854, |
| "grad_norm": 12.510161399841309, |
| "learning_rate": 1.999920150955094e-07, |
| "loss": 1.0746, |
| "step": 9730 |
| }, |
| { |
| "epoch": 0.008971757386731564, |
| "grad_norm": 12.185508728027344, |
| "learning_rate": 1.9999199679225355e-07, |
| "loss": 1.0676, |
| "step": 9740 |
| }, |
| { |
| "epoch": 0.008980968636615272, |
| "grad_norm": 11.52450180053711, |
| "learning_rate": 1.9999197846804492e-07, |
| "loss": 1.0417, |
| "step": 9750 |
| }, |
| { |
| "epoch": 0.00899017988649898, |
| "grad_norm": 10.623550415039062, |
| "learning_rate": 1.9999196012288346e-07, |
| "loss": 1.067, |
| "step": 9760 |
| }, |
| { |
| "epoch": 0.008999391136382687, |
| "grad_norm": 10.873220443725586, |
| "learning_rate": 1.999919417567692e-07, |
| "loss": 1.0894, |
| "step": 9770 |
| }, |
| { |
| "epoch": 0.009008602386266395, |
| "grad_norm": 12.051067352294922, |
| "learning_rate": 1.9999192336970213e-07, |
| "loss": 1.0223, |
| "step": 9780 |
| }, |
| { |
| "epoch": 0.009017813636150103, |
| "grad_norm": 10.203484535217285, |
| "learning_rate": 1.999919049616823e-07, |
| "loss": 1.0803, |
| "step": 9790 |
| }, |
| { |
| "epoch": 0.00902702488603381, |
| "grad_norm": 127.2802963256836, |
| "learning_rate": 1.9999188653270965e-07, |
| "loss": 1.0724, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.009036236135917518, |
| "grad_norm": 13.264669418334961, |
| "learning_rate": 1.9999186808278424e-07, |
| "loss": 1.0494, |
| "step": 9810 |
| }, |
| { |
| "epoch": 0.009045447385801226, |
| "grad_norm": 10.563486099243164, |
| "learning_rate": 1.9999184961190601e-07, |
| "loss": 1.0567, |
| "step": 9820 |
| }, |
| { |
| "epoch": 0.009054658635684935, |
| "grad_norm": 11.760886192321777, |
| "learning_rate": 1.9999183112007505e-07, |
| "loss": 1.0557, |
| "step": 9830 |
| }, |
| { |
| "epoch": 0.009063869885568643, |
| "grad_norm": 18.59585952758789, |
| "learning_rate": 1.9999181260729127e-07, |
| "loss": 1.1102, |
| "step": 9840 |
| }, |
| { |
| "epoch": 0.009073081135452351, |
| "grad_norm": 12.069076538085938, |
| "learning_rate": 1.9999179407355476e-07, |
| "loss": 1.0551, |
| "step": 9850 |
| }, |
| { |
| "epoch": 0.009082292385336059, |
| "grad_norm": 11.18621826171875, |
| "learning_rate": 1.9999177551886545e-07, |
| "loss": 1.0923, |
| "step": 9860 |
| }, |
| { |
| "epoch": 0.009091503635219766, |
| "grad_norm": 12.968023300170898, |
| "learning_rate": 1.9999175694322338e-07, |
| "loss": 1.0638, |
| "step": 9870 |
| }, |
| { |
| "epoch": 0.009100714885103474, |
| "grad_norm": 10.560938835144043, |
| "learning_rate": 1.9999173834662858e-07, |
| "loss": 1.0454, |
| "step": 9880 |
| }, |
| { |
| "epoch": 0.009109926134987182, |
| "grad_norm": 10.572060585021973, |
| "learning_rate": 1.99991719729081e-07, |
| "loss": 1.0659, |
| "step": 9890 |
| }, |
| { |
| "epoch": 0.00911913738487089, |
| "grad_norm": 12.31320571899414, |
| "learning_rate": 1.9999170109058068e-07, |
| "loss": 1.074, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.0091283486347546, |
| "grad_norm": 15.759599685668945, |
| "learning_rate": 1.9999168243112761e-07, |
| "loss": 1.0546, |
| "step": 9910 |
| }, |
| { |
| "epoch": 0.009137559884638307, |
| "grad_norm": 10.657308578491211, |
| "learning_rate": 1.9999166375072178e-07, |
| "loss": 1.0739, |
| "step": 9920 |
| }, |
| { |
| "epoch": 0.009146771134522015, |
| "grad_norm": 10.577692985534668, |
| "learning_rate": 1.9999164504936324e-07, |
| "loss": 1.0127, |
| "step": 9930 |
| }, |
| { |
| "epoch": 0.009155982384405723, |
| "grad_norm": 14.53947639465332, |
| "learning_rate": 1.9999162632705194e-07, |
| "loss": 1.0579, |
| "step": 9940 |
| }, |
| { |
| "epoch": 0.00916519363428943, |
| "grad_norm": 10.085494041442871, |
| "learning_rate": 1.9999160758378793e-07, |
| "loss": 1.0168, |
| "step": 9950 |
| }, |
| { |
| "epoch": 0.009174404884173138, |
| "grad_norm": 11.186735153198242, |
| "learning_rate": 1.9999158881957116e-07, |
| "loss": 1.0795, |
| "step": 9960 |
| }, |
| { |
| "epoch": 0.009183616134056846, |
| "grad_norm": 14.622456550598145, |
| "learning_rate": 1.999915700344017e-07, |
| "loss": 1.0782, |
| "step": 9970 |
| }, |
| { |
| "epoch": 0.009192827383940554, |
| "grad_norm": 11.971503257751465, |
| "learning_rate": 1.999915512282795e-07, |
| "loss": 1.0538, |
| "step": 9980 |
| }, |
| { |
| "epoch": 0.009202038633824263, |
| "grad_norm": 11.950106620788574, |
| "learning_rate": 1.999915324012046e-07, |
| "loss": 1.1311, |
| "step": 9990 |
| }, |
| { |
| "epoch": 0.00921124988370797, |
| "grad_norm": 12.671368598937988, |
| "learning_rate": 1.9999151355317698e-07, |
| "loss": 1.0924, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.009220461133591679, |
| "grad_norm": 12.27900505065918, |
| "learning_rate": 1.9999149468419666e-07, |
| "loss": 1.0229, |
| "step": 10010 |
| }, |
| { |
| "epoch": 0.009229672383475386, |
| "grad_norm": 10.733115196228027, |
| "learning_rate": 1.9999147579426362e-07, |
| "loss": 1.0659, |
| "step": 10020 |
| }, |
| { |
| "epoch": 0.009238883633359094, |
| "grad_norm": 13.622121810913086, |
| "learning_rate": 1.999914568833779e-07, |
| "loss": 1.0278, |
| "step": 10030 |
| }, |
| { |
| "epoch": 0.009248094883242802, |
| "grad_norm": 10.59408187866211, |
| "learning_rate": 1.9999143795153948e-07, |
| "loss": 1.0517, |
| "step": 10040 |
| }, |
| { |
| "epoch": 0.00925730613312651, |
| "grad_norm": 11.185957908630371, |
| "learning_rate": 1.9999141899874834e-07, |
| "loss": 1.0966, |
| "step": 10050 |
| }, |
| { |
| "epoch": 0.009266517383010217, |
| "grad_norm": 10.468329429626465, |
| "learning_rate": 1.9999140002500457e-07, |
| "loss": 1.0735, |
| "step": 10060 |
| }, |
| { |
| "epoch": 0.009275728632893925, |
| "grad_norm": 10.570304870605469, |
| "learning_rate": 1.9999138103030807e-07, |
| "loss": 1.0997, |
| "step": 10070 |
| }, |
| { |
| "epoch": 0.009284939882777635, |
| "grad_norm": 10.814870834350586, |
| "learning_rate": 1.9999136201465888e-07, |
| "loss": 1.0373, |
| "step": 10080 |
| }, |
| { |
| "epoch": 0.009294151132661342, |
| "grad_norm": 10.930500030517578, |
| "learning_rate": 1.9999134297805704e-07, |
| "loss": 1.0703, |
| "step": 10090 |
| }, |
| { |
| "epoch": 0.00930336238254505, |
| "grad_norm": 10.6019287109375, |
| "learning_rate": 1.9999132392050253e-07, |
| "loss": 1.0763, |
| "step": 10100 |
| }, |
| { |
| "epoch": 0.009312573632428758, |
| "grad_norm": 10.920812606811523, |
| "learning_rate": 1.9999130484199532e-07, |
| "loss": 1.0706, |
| "step": 10110 |
| }, |
| { |
| "epoch": 0.009321784882312466, |
| "grad_norm": 11.809261322021484, |
| "learning_rate": 1.9999128574253548e-07, |
| "loss": 1.0673, |
| "step": 10120 |
| }, |
| { |
| "epoch": 0.009330996132196174, |
| "grad_norm": 11.189637184143066, |
| "learning_rate": 1.9999126662212298e-07, |
| "loss": 1.0556, |
| "step": 10130 |
| }, |
| { |
| "epoch": 0.009340207382079881, |
| "grad_norm": 13.285804748535156, |
| "learning_rate": 1.9999124748075783e-07, |
| "loss": 1.0557, |
| "step": 10140 |
| }, |
| { |
| "epoch": 0.009349418631963589, |
| "grad_norm": 12.155187606811523, |
| "learning_rate": 1.9999122831843999e-07, |
| "loss": 1.0653, |
| "step": 10150 |
| }, |
| { |
| "epoch": 0.009358629881847299, |
| "grad_norm": 10.259940147399902, |
| "learning_rate": 1.9999120913516952e-07, |
| "loss": 1.0167, |
| "step": 10160 |
| }, |
| { |
| "epoch": 0.009367841131731006, |
| "grad_norm": 12.034541130065918, |
| "learning_rate": 1.9999118993094641e-07, |
| "loss": 1.1231, |
| "step": 10170 |
| }, |
| { |
| "epoch": 0.009377052381614714, |
| "grad_norm": 11.056714057922363, |
| "learning_rate": 1.9999117070577066e-07, |
| "loss": 1.0533, |
| "step": 10180 |
| }, |
| { |
| "epoch": 0.009386263631498422, |
| "grad_norm": 11.024518966674805, |
| "learning_rate": 1.9999115145964227e-07, |
| "loss": 1.0533, |
| "step": 10190 |
| }, |
| { |
| "epoch": 0.00939547488138213, |
| "grad_norm": 11.203171730041504, |
| "learning_rate": 1.9999113219256128e-07, |
| "loss": 1.0558, |
| "step": 10200 |
| }, |
| { |
| "epoch": 0.009404686131265837, |
| "grad_norm": 11.430652618408203, |
| "learning_rate": 1.9999111290452763e-07, |
| "loss": 1.0432, |
| "step": 10210 |
| }, |
| { |
| "epoch": 0.009413897381149545, |
| "grad_norm": 11.131461143493652, |
| "learning_rate": 1.9999109359554135e-07, |
| "loss": 1.0335, |
| "step": 10220 |
| }, |
| { |
| "epoch": 0.009423108631033253, |
| "grad_norm": 11.44570541381836, |
| "learning_rate": 1.9999107426560244e-07, |
| "loss": 1.0681, |
| "step": 10230 |
| }, |
| { |
| "epoch": 0.00943231988091696, |
| "grad_norm": 10.651673316955566, |
| "learning_rate": 1.9999105491471095e-07, |
| "loss": 1.0626, |
| "step": 10240 |
| }, |
| { |
| "epoch": 0.00944153113080067, |
| "grad_norm": 11.448568344116211, |
| "learning_rate": 1.9999103554286686e-07, |
| "loss": 1.0124, |
| "step": 10250 |
| }, |
| { |
| "epoch": 0.009450742380684378, |
| "grad_norm": 10.503666877746582, |
| "learning_rate": 1.999910161500701e-07, |
| "loss": 1.0563, |
| "step": 10260 |
| }, |
| { |
| "epoch": 0.009459953630568086, |
| "grad_norm": 11.241734504699707, |
| "learning_rate": 1.9999099673632078e-07, |
| "loss": 1.04, |
| "step": 10270 |
| }, |
| { |
| "epoch": 0.009469164880451793, |
| "grad_norm": 13.1773042678833, |
| "learning_rate": 1.9999097730161885e-07, |
| "loss": 1.0685, |
| "step": 10280 |
| }, |
| { |
| "epoch": 0.009478376130335501, |
| "grad_norm": 10.578974723815918, |
| "learning_rate": 1.9999095784596435e-07, |
| "loss": 1.0502, |
| "step": 10290 |
| }, |
| { |
| "epoch": 0.009487587380219209, |
| "grad_norm": 15.131848335266113, |
| "learning_rate": 1.999909383693572e-07, |
| "loss": 1.0951, |
| "step": 10300 |
| }, |
| { |
| "epoch": 0.009496798630102917, |
| "grad_norm": 10.200125694274902, |
| "learning_rate": 1.9999091887179752e-07, |
| "loss": 1.0467, |
| "step": 10310 |
| }, |
| { |
| "epoch": 0.009506009879986625, |
| "grad_norm": 13.75231647491455, |
| "learning_rate": 1.9999089935328522e-07, |
| "loss": 1.0818, |
| "step": 10320 |
| }, |
| { |
| "epoch": 0.009515221129870334, |
| "grad_norm": 11.539515495300293, |
| "learning_rate": 1.9999087981382038e-07, |
| "loss": 1.061, |
| "step": 10330 |
| }, |
| { |
| "epoch": 0.009524432379754042, |
| "grad_norm": 11.828240394592285, |
| "learning_rate": 1.9999086025340293e-07, |
| "loss": 1.0534, |
| "step": 10340 |
| }, |
| { |
| "epoch": 0.00953364362963775, |
| "grad_norm": 11.30993938446045, |
| "learning_rate": 1.9999084067203293e-07, |
| "loss": 1.0387, |
| "step": 10350 |
| }, |
| { |
| "epoch": 0.009542854879521457, |
| "grad_norm": 69.48150634765625, |
| "learning_rate": 1.9999082106971035e-07, |
| "loss": 1.0504, |
| "step": 10360 |
| }, |
| { |
| "epoch": 0.009552066129405165, |
| "grad_norm": 11.68458080291748, |
| "learning_rate": 1.9999080144643517e-07, |
| "loss": 1.0651, |
| "step": 10370 |
| }, |
| { |
| "epoch": 0.009561277379288873, |
| "grad_norm": 18.312400817871094, |
| "learning_rate": 1.9999078180220752e-07, |
| "loss": 1.0835, |
| "step": 10380 |
| }, |
| { |
| "epoch": 0.00957048862917258, |
| "grad_norm": 10.953816413879395, |
| "learning_rate": 1.9999076213702724e-07, |
| "loss": 1.053, |
| "step": 10390 |
| }, |
| { |
| "epoch": 0.009579699879056288, |
| "grad_norm": 11.196744918823242, |
| "learning_rate": 1.9999074245089446e-07, |
| "loss": 1.0213, |
| "step": 10400 |
| }, |
| { |
| "epoch": 0.009588911128939996, |
| "grad_norm": 13.228312492370605, |
| "learning_rate": 1.999907227438091e-07, |
| "loss": 1.131, |
| "step": 10410 |
| }, |
| { |
| "epoch": 0.009598122378823706, |
| "grad_norm": 10.745619773864746, |
| "learning_rate": 1.999907030157712e-07, |
| "loss": 1.0866, |
| "step": 10420 |
| }, |
| { |
| "epoch": 0.009607333628707413, |
| "grad_norm": 12.490727424621582, |
| "learning_rate": 1.9999068326678078e-07, |
| "loss": 1.0203, |
| "step": 10430 |
| }, |
| { |
| "epoch": 0.009616544878591121, |
| "grad_norm": 12.255220413208008, |
| "learning_rate": 1.9999066349683782e-07, |
| "loss": 1.0301, |
| "step": 10440 |
| }, |
| { |
| "epoch": 0.009625756128474829, |
| "grad_norm": 12.161656379699707, |
| "learning_rate": 1.9999064370594233e-07, |
| "loss": 1.0781, |
| "step": 10450 |
| }, |
| { |
| "epoch": 0.009634967378358537, |
| "grad_norm": 10.330904960632324, |
| "learning_rate": 1.9999062389409433e-07, |
| "loss": 1.0378, |
| "step": 10460 |
| }, |
| { |
| "epoch": 0.009644178628242244, |
| "grad_norm": 10.937992095947266, |
| "learning_rate": 1.9999060406129377e-07, |
| "loss": 1.0819, |
| "step": 10470 |
| }, |
| { |
| "epoch": 0.009653389878125952, |
| "grad_norm": 10.681947708129883, |
| "learning_rate": 1.999905842075407e-07, |
| "loss": 1.048, |
| "step": 10480 |
| }, |
| { |
| "epoch": 0.00966260112800966, |
| "grad_norm": 10.496880531311035, |
| "learning_rate": 1.9999056433283515e-07, |
| "loss": 1.0542, |
| "step": 10490 |
| }, |
| { |
| "epoch": 0.00967181237789337, |
| "grad_norm": 13.243370056152344, |
| "learning_rate": 1.9999054443717707e-07, |
| "loss": 1.0907, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.009681023627777077, |
| "grad_norm": 12.429362297058105, |
| "learning_rate": 1.999905245205665e-07, |
| "loss": 1.0617, |
| "step": 10510 |
| }, |
| { |
| "epoch": 0.009690234877660785, |
| "grad_norm": 11.334091186523438, |
| "learning_rate": 1.999905045830034e-07, |
| "loss": 1.0828, |
| "step": 10520 |
| }, |
| { |
| "epoch": 0.009699446127544493, |
| "grad_norm": 10.744280815124512, |
| "learning_rate": 1.9999048462448781e-07, |
| "loss": 1.0251, |
| "step": 10530 |
| }, |
| { |
| "epoch": 0.0097086573774282, |
| "grad_norm": 9.50810718536377, |
| "learning_rate": 1.9999046464501977e-07, |
| "loss": 1.0774, |
| "step": 10540 |
| }, |
| { |
| "epoch": 0.009717868627311908, |
| "grad_norm": 11.54856014251709, |
| "learning_rate": 1.999904446445992e-07, |
| "loss": 1.0495, |
| "step": 10550 |
| }, |
| { |
| "epoch": 0.009727079877195616, |
| "grad_norm": 11.866401672363281, |
| "learning_rate": 1.9999042462322615e-07, |
| "loss": 1.0396, |
| "step": 10560 |
| }, |
| { |
| "epoch": 0.009736291127079324, |
| "grad_norm": 11.135425567626953, |
| "learning_rate": 1.999904045809006e-07, |
| "loss": 1.0604, |
| "step": 10570 |
| }, |
| { |
| "epoch": 0.009745502376963033, |
| "grad_norm": 11.822609901428223, |
| "learning_rate": 1.9999038451762262e-07, |
| "loss": 1.0594, |
| "step": 10580 |
| }, |
| { |
| "epoch": 0.009754713626846741, |
| "grad_norm": 13.137497901916504, |
| "learning_rate": 1.9999036443339214e-07, |
| "loss": 1.0249, |
| "step": 10590 |
| }, |
| { |
| "epoch": 0.009763924876730449, |
| "grad_norm": 10.92626953125, |
| "learning_rate": 1.999903443282092e-07, |
| "loss": 1.0667, |
| "step": 10600 |
| }, |
| { |
| "epoch": 0.009773136126614157, |
| "grad_norm": 10.957594871520996, |
| "learning_rate": 1.9999032420207383e-07, |
| "loss": 1.045, |
| "step": 10610 |
| }, |
| { |
| "epoch": 0.009782347376497864, |
| "grad_norm": 10.834756851196289, |
| "learning_rate": 1.9999030405498596e-07, |
| "loss": 1.0763, |
| "step": 10620 |
| }, |
| { |
| "epoch": 0.009791558626381572, |
| "grad_norm": 11.003363609313965, |
| "learning_rate": 1.9999028388694564e-07, |
| "loss": 1.0369, |
| "step": 10630 |
| }, |
| { |
| "epoch": 0.00980076987626528, |
| "grad_norm": 12.065079689025879, |
| "learning_rate": 1.9999026369795288e-07, |
| "loss": 1.0937, |
| "step": 10640 |
| }, |
| { |
| "epoch": 0.009809981126148988, |
| "grad_norm": 9.540847778320312, |
| "learning_rate": 1.9999024348800767e-07, |
| "loss": 1.0182, |
| "step": 10650 |
| }, |
| { |
| "epoch": 0.009819192376032695, |
| "grad_norm": 11.240922927856445, |
| "learning_rate": 1.9999022325711002e-07, |
| "loss": 1.0733, |
| "step": 10660 |
| }, |
| { |
| "epoch": 0.009828403625916405, |
| "grad_norm": 12.070985794067383, |
| "learning_rate": 1.9999020300525994e-07, |
| "loss": 1.0647, |
| "step": 10670 |
| }, |
| { |
| "epoch": 0.009837614875800113, |
| "grad_norm": 10.385763168334961, |
| "learning_rate": 1.999901827324574e-07, |
| "loss": 1.0575, |
| "step": 10680 |
| }, |
| { |
| "epoch": 0.00984682612568382, |
| "grad_norm": 11.309329986572266, |
| "learning_rate": 1.9999016243870246e-07, |
| "loss": 1.063, |
| "step": 10690 |
| }, |
| { |
| "epoch": 0.009856037375567528, |
| "grad_norm": 11.986614227294922, |
| "learning_rate": 1.9999014212399508e-07, |
| "loss": 1.0109, |
| "step": 10700 |
| }, |
| { |
| "epoch": 0.009865248625451236, |
| "grad_norm": 10.414978981018066, |
| "learning_rate": 1.999901217883353e-07, |
| "loss": 1.0162, |
| "step": 10710 |
| }, |
| { |
| "epoch": 0.009874459875334944, |
| "grad_norm": 10.92629623413086, |
| "learning_rate": 1.9999010143172309e-07, |
| "loss": 1.0512, |
| "step": 10720 |
| }, |
| { |
| "epoch": 0.009883671125218652, |
| "grad_norm": 11.59291934967041, |
| "learning_rate": 1.9999008105415847e-07, |
| "loss": 1.057, |
| "step": 10730 |
| }, |
| { |
| "epoch": 0.00989288237510236, |
| "grad_norm": 12.11353588104248, |
| "learning_rate": 1.9999006065564144e-07, |
| "loss": 1.0911, |
| "step": 10740 |
| }, |
| { |
| "epoch": 0.009902093624986069, |
| "grad_norm": 12.18124771118164, |
| "learning_rate": 1.9999004023617202e-07, |
| "loss": 1.0618, |
| "step": 10750 |
| }, |
| { |
| "epoch": 0.009911304874869777, |
| "grad_norm": 11.09522533416748, |
| "learning_rate": 1.9999001979575015e-07, |
| "loss": 1.0064, |
| "step": 10760 |
| }, |
| { |
| "epoch": 0.009920516124753484, |
| "grad_norm": 10.982993125915527, |
| "learning_rate": 1.9998999933437592e-07, |
| "loss": 1.0457, |
| "step": 10770 |
| }, |
| { |
| "epoch": 0.009929727374637192, |
| "grad_norm": 11.105592727661133, |
| "learning_rate": 1.9998997885204935e-07, |
| "loss": 1.0368, |
| "step": 10780 |
| }, |
| { |
| "epoch": 0.0099389386245209, |
| "grad_norm": 10.019579887390137, |
| "learning_rate": 1.999899583487703e-07, |
| "loss": 0.9833, |
| "step": 10790 |
| }, |
| { |
| "epoch": 0.009948149874404608, |
| "grad_norm": 11.136319160461426, |
| "learning_rate": 1.9998993782453892e-07, |
| "loss": 1.0622, |
| "step": 10800 |
| }, |
| { |
| "epoch": 0.009957361124288315, |
| "grad_norm": 11.089784622192383, |
| "learning_rate": 1.9998991727935517e-07, |
| "loss": 1.0711, |
| "step": 10810 |
| }, |
| { |
| "epoch": 0.009966572374172023, |
| "grad_norm": 11.013596534729004, |
| "learning_rate": 1.9998989671321903e-07, |
| "loss": 0.9907, |
| "step": 10820 |
| }, |
| { |
| "epoch": 0.009975783624055731, |
| "grad_norm": 10.718193054199219, |
| "learning_rate": 1.9998987612613052e-07, |
| "loss": 1.0678, |
| "step": 10830 |
| }, |
| { |
| "epoch": 0.00998499487393944, |
| "grad_norm": 11.073073387145996, |
| "learning_rate": 1.9998985551808967e-07, |
| "loss": 1.0253, |
| "step": 10840 |
| }, |
| { |
| "epoch": 0.009994206123823148, |
| "grad_norm": 10.399203300476074, |
| "learning_rate": 1.9998983488909643e-07, |
| "loss": 1.0914, |
| "step": 10850 |
| }, |
| { |
| "epoch": 0.010003417373706856, |
| "grad_norm": 11.7626953125, |
| "learning_rate": 1.9998981423915083e-07, |
| "loss": 1.0602, |
| "step": 10860 |
| }, |
| { |
| "epoch": 0.010012628623590564, |
| "grad_norm": 10.847984313964844, |
| "learning_rate": 1.999897935682529e-07, |
| "loss": 0.998, |
| "step": 10870 |
| }, |
| { |
| "epoch": 0.010021839873474271, |
| "grad_norm": 11.725564956665039, |
| "learning_rate": 1.9998977287640262e-07, |
| "loss": 1.0327, |
| "step": 10880 |
| }, |
| { |
| "epoch": 0.01003105112335798, |
| "grad_norm": 10.937654495239258, |
| "learning_rate": 1.999897521636e-07, |
| "loss": 1.0841, |
| "step": 10890 |
| }, |
| { |
| "epoch": 0.010040262373241687, |
| "grad_norm": 11.381839752197266, |
| "learning_rate": 1.99989731429845e-07, |
| "loss": 1.0579, |
| "step": 10900 |
| }, |
| { |
| "epoch": 0.010049473623125395, |
| "grad_norm": 10.837337493896484, |
| "learning_rate": 1.9998971067513773e-07, |
| "loss": 1.1115, |
| "step": 10910 |
| }, |
| { |
| "epoch": 0.010058684873009104, |
| "grad_norm": 10.719988822937012, |
| "learning_rate": 1.999896898994781e-07, |
| "loss": 1.0117, |
| "step": 10920 |
| }, |
| { |
| "epoch": 0.010067896122892812, |
| "grad_norm": 11.277645111083984, |
| "learning_rate": 1.999896691028661e-07, |
| "loss": 1.0429, |
| "step": 10930 |
| }, |
| { |
| "epoch": 0.01007710737277652, |
| "grad_norm": 11.057273864746094, |
| "learning_rate": 1.9998964828530185e-07, |
| "loss": 1.0654, |
| "step": 10940 |
| }, |
| { |
| "epoch": 0.010086318622660228, |
| "grad_norm": 10.45768928527832, |
| "learning_rate": 1.9998962744678524e-07, |
| "loss": 1.0479, |
| "step": 10950 |
| }, |
| { |
| "epoch": 0.010095529872543935, |
| "grad_norm": 12.720081329345703, |
| "learning_rate": 1.9998960658731635e-07, |
| "loss": 1.0539, |
| "step": 10960 |
| }, |
| { |
| "epoch": 0.010104741122427643, |
| "grad_norm": 11.445027351379395, |
| "learning_rate": 1.9998958570689511e-07, |
| "loss": 1.0391, |
| "step": 10970 |
| }, |
| { |
| "epoch": 0.01011395237231135, |
| "grad_norm": 10.973172187805176, |
| "learning_rate": 1.999895648055216e-07, |
| "loss": 1.054, |
| "step": 10980 |
| }, |
| { |
| "epoch": 0.010123163622195059, |
| "grad_norm": 12.430107116699219, |
| "learning_rate": 1.9998954388319576e-07, |
| "loss": 1.0129, |
| "step": 10990 |
| }, |
| { |
| "epoch": 0.010132374872078766, |
| "grad_norm": 11.580085754394531, |
| "learning_rate": 1.9998952293991767e-07, |
| "loss": 1.0698, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.010141586121962476, |
| "grad_norm": 11.905829429626465, |
| "learning_rate": 1.9998950197568724e-07, |
| "loss": 1.0592, |
| "step": 11010 |
| }, |
| { |
| "epoch": 0.010150797371846184, |
| "grad_norm": 12.367510795593262, |
| "learning_rate": 1.9998948099050458e-07, |
| "loss": 1.0333, |
| "step": 11020 |
| }, |
| { |
| "epoch": 0.010160008621729891, |
| "grad_norm": 10.181341171264648, |
| "learning_rate": 1.9998945998436957e-07, |
| "loss": 1.0621, |
| "step": 11030 |
| }, |
| { |
| "epoch": 0.0101692198716136, |
| "grad_norm": 11.343079566955566, |
| "learning_rate": 1.9998943895728236e-07, |
| "loss": 1.0382, |
| "step": 11040 |
| }, |
| { |
| "epoch": 0.010178431121497307, |
| "grad_norm": 12.176451683044434, |
| "learning_rate": 1.9998941790924281e-07, |
| "loss": 1.0702, |
| "step": 11050 |
| }, |
| { |
| "epoch": 0.010187642371381015, |
| "grad_norm": 41.720909118652344, |
| "learning_rate": 1.9998939684025103e-07, |
| "loss": 1.0766, |
| "step": 11060 |
| }, |
| { |
| "epoch": 0.010196853621264722, |
| "grad_norm": 10.554953575134277, |
| "learning_rate": 1.9998937575030696e-07, |
| "loss": 1.0484, |
| "step": 11070 |
| }, |
| { |
| "epoch": 0.01020606487114843, |
| "grad_norm": 11.935941696166992, |
| "learning_rate": 1.9998935463941066e-07, |
| "loss": 1.0384, |
| "step": 11080 |
| }, |
| { |
| "epoch": 0.01021527612103214, |
| "grad_norm": 11.626418113708496, |
| "learning_rate": 1.999893335075621e-07, |
| "loss": 1.0802, |
| "step": 11090 |
| }, |
| { |
| "epoch": 0.010224487370915848, |
| "grad_norm": 10.990792274475098, |
| "learning_rate": 1.9998931235476127e-07, |
| "loss": 1.0559, |
| "step": 11100 |
| }, |
| { |
| "epoch": 0.010233698620799555, |
| "grad_norm": 11.926664352416992, |
| "learning_rate": 1.999892911810082e-07, |
| "loss": 1.0566, |
| "step": 11110 |
| }, |
| { |
| "epoch": 0.010242909870683263, |
| "grad_norm": 9.343741416931152, |
| "learning_rate": 1.9998926998630287e-07, |
| "loss": 1.0399, |
| "step": 11120 |
| }, |
| { |
| "epoch": 0.01025212112056697, |
| "grad_norm": 9.005437850952148, |
| "learning_rate": 1.9998924877064532e-07, |
| "loss": 1.0437, |
| "step": 11130 |
| }, |
| { |
| "epoch": 0.010261332370450679, |
| "grad_norm": 11.158332824707031, |
| "learning_rate": 1.9998922753403554e-07, |
| "loss": 1.036, |
| "step": 11140 |
| }, |
| { |
| "epoch": 0.010270543620334386, |
| "grad_norm": 11.71802043914795, |
| "learning_rate": 1.9998920627647352e-07, |
| "loss": 1.0713, |
| "step": 11150 |
| }, |
| { |
| "epoch": 0.010279754870218094, |
| "grad_norm": 9.552002906799316, |
| "learning_rate": 1.999891849979593e-07, |
| "loss": 1.026, |
| "step": 11160 |
| }, |
| { |
| "epoch": 0.010288966120101804, |
| "grad_norm": 10.18093490600586, |
| "learning_rate": 1.9998916369849284e-07, |
| "loss": 1.0273, |
| "step": 11170 |
| }, |
| { |
| "epoch": 0.010298177369985511, |
| "grad_norm": 11.673882484436035, |
| "learning_rate": 1.9998914237807418e-07, |
| "loss": 1.0336, |
| "step": 11180 |
| }, |
| { |
| "epoch": 0.010307388619869219, |
| "grad_norm": 11.52221393585205, |
| "learning_rate": 1.9998912103670328e-07, |
| "loss": 1.0426, |
| "step": 11190 |
| }, |
| { |
| "epoch": 0.010316599869752927, |
| "grad_norm": 11.424299240112305, |
| "learning_rate": 1.999890996743802e-07, |
| "loss": 1.0202, |
| "step": 11200 |
| }, |
| { |
| "epoch": 0.010325811119636635, |
| "grad_norm": 11.781866073608398, |
| "learning_rate": 1.999890782911049e-07, |
| "loss": 1.0761, |
| "step": 11210 |
| }, |
| { |
| "epoch": 0.010335022369520342, |
| "grad_norm": 12.714162826538086, |
| "learning_rate": 1.9998905688687743e-07, |
| "loss": 1.0336, |
| "step": 11220 |
| }, |
| { |
| "epoch": 0.01034423361940405, |
| "grad_norm": 11.70817756652832, |
| "learning_rate": 1.9998903546169775e-07, |
| "loss": 1.0537, |
| "step": 11230 |
| }, |
| { |
| "epoch": 0.010353444869287758, |
| "grad_norm": 10.69643783569336, |
| "learning_rate": 1.9998901401556587e-07, |
| "loss": 1.0107, |
| "step": 11240 |
| }, |
| { |
| "epoch": 0.010362656119171466, |
| "grad_norm": 10.04344367980957, |
| "learning_rate": 1.9998899254848181e-07, |
| "loss": 1.0484, |
| "step": 11250 |
| }, |
| { |
| "epoch": 0.010371867369055175, |
| "grad_norm": 11.598608016967773, |
| "learning_rate": 1.999889710604456e-07, |
| "loss": 1.0958, |
| "step": 11260 |
| }, |
| { |
| "epoch": 0.010381078618938883, |
| "grad_norm": 10.56346607208252, |
| "learning_rate": 1.9998894955145718e-07, |
| "loss": 1.0281, |
| "step": 11270 |
| }, |
| { |
| "epoch": 0.01039028986882259, |
| "grad_norm": 11.793848991394043, |
| "learning_rate": 1.999889280215166e-07, |
| "loss": 1.0511, |
| "step": 11280 |
| }, |
| { |
| "epoch": 0.010399501118706298, |
| "grad_norm": 11.62499713897705, |
| "learning_rate": 1.9998890647062385e-07, |
| "loss": 1.119, |
| "step": 11290 |
| }, |
| { |
| "epoch": 0.010408712368590006, |
| "grad_norm": 12.892790794372559, |
| "learning_rate": 1.9998888489877894e-07, |
| "loss": 1.1056, |
| "step": 11300 |
| }, |
| { |
| "epoch": 0.010417923618473714, |
| "grad_norm": 10.436480522155762, |
| "learning_rate": 1.9998886330598188e-07, |
| "loss": 1.0395, |
| "step": 11310 |
| }, |
| { |
| "epoch": 0.010427134868357422, |
| "grad_norm": 37.64136505126953, |
| "learning_rate": 1.9998884169223266e-07, |
| "loss": 1.0522, |
| "step": 11320 |
| }, |
| { |
| "epoch": 0.01043634611824113, |
| "grad_norm": 10.522521018981934, |
| "learning_rate": 1.999888200575313e-07, |
| "loss": 1.0441, |
| "step": 11330 |
| }, |
| { |
| "epoch": 0.010445557368124839, |
| "grad_norm": 11.5386323928833, |
| "learning_rate": 1.999887984018778e-07, |
| "loss": 1.0446, |
| "step": 11340 |
| }, |
| { |
| "epoch": 0.010454768618008547, |
| "grad_norm": 9.8550386428833, |
| "learning_rate": 1.9998877672527213e-07, |
| "loss": 1.0219, |
| "step": 11350 |
| }, |
| { |
| "epoch": 0.010463979867892255, |
| "grad_norm": 11.39132308959961, |
| "learning_rate": 1.9998875502771438e-07, |
| "loss": 1.0657, |
| "step": 11360 |
| }, |
| { |
| "epoch": 0.010473191117775962, |
| "grad_norm": 11.237606048583984, |
| "learning_rate": 1.9998873330920447e-07, |
| "loss": 1.0394, |
| "step": 11370 |
| }, |
| { |
| "epoch": 0.01048240236765967, |
| "grad_norm": 12.085046768188477, |
| "learning_rate": 1.999887115697424e-07, |
| "loss": 1.0784, |
| "step": 11380 |
| }, |
| { |
| "epoch": 0.010491613617543378, |
| "grad_norm": 19.104768753051758, |
| "learning_rate": 1.9998868980932827e-07, |
| "loss": 1.0477, |
| "step": 11390 |
| }, |
| { |
| "epoch": 0.010500824867427086, |
| "grad_norm": 20.65506935119629, |
| "learning_rate": 1.99988668027962e-07, |
| "loss": 1.0545, |
| "step": 11400 |
| }, |
| { |
| "epoch": 0.010510036117310793, |
| "grad_norm": 10.546294212341309, |
| "learning_rate": 1.999886462256436e-07, |
| "loss": 1.0681, |
| "step": 11410 |
| }, |
| { |
| "epoch": 0.010519247367194501, |
| "grad_norm": 9.988753318786621, |
| "learning_rate": 1.9998862440237315e-07, |
| "loss": 1.0077, |
| "step": 11420 |
| }, |
| { |
| "epoch": 0.01052845861707821, |
| "grad_norm": 11.502646446228027, |
| "learning_rate": 1.9998860255815052e-07, |
| "loss": 1.0401, |
| "step": 11430 |
| }, |
| { |
| "epoch": 0.010537669866961918, |
| "grad_norm": 10.733711242675781, |
| "learning_rate": 1.9998858069297583e-07, |
| "loss": 1.0293, |
| "step": 11440 |
| }, |
| { |
| "epoch": 0.010546881116845626, |
| "grad_norm": 10.6617431640625, |
| "learning_rate": 1.9998855880684906e-07, |
| "loss": 1.0365, |
| "step": 11450 |
| }, |
| { |
| "epoch": 0.010556092366729334, |
| "grad_norm": 11.357271194458008, |
| "learning_rate": 1.999885368997702e-07, |
| "loss": 1.0612, |
| "step": 11460 |
| }, |
| { |
| "epoch": 0.010565303616613042, |
| "grad_norm": 11.047504425048828, |
| "learning_rate": 1.9998851497173924e-07, |
| "loss": 1.068, |
| "step": 11470 |
| }, |
| { |
| "epoch": 0.01057451486649675, |
| "grad_norm": 10.95936393737793, |
| "learning_rate": 1.999884930227562e-07, |
| "loss": 1.066, |
| "step": 11480 |
| }, |
| { |
| "epoch": 0.010583726116380457, |
| "grad_norm": 15.157825469970703, |
| "learning_rate": 1.999884710528211e-07, |
| "loss": 1.0715, |
| "step": 11490 |
| }, |
| { |
| "epoch": 0.010592937366264165, |
| "grad_norm": 10.480347633361816, |
| "learning_rate": 1.999884490619339e-07, |
| "loss": 1.0993, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.010602148616147875, |
| "grad_norm": 10.28208065032959, |
| "learning_rate": 1.9998842705009466e-07, |
| "loss": 1.0609, |
| "step": 11510 |
| }, |
| { |
| "epoch": 0.010611359866031582, |
| "grad_norm": 11.873906135559082, |
| "learning_rate": 1.9998840501730336e-07, |
| "loss": 1.037, |
| "step": 11520 |
| }, |
| { |
| "epoch": 0.01062057111591529, |
| "grad_norm": 12.80936336517334, |
| "learning_rate": 1.9998838296356e-07, |
| "loss": 1.0644, |
| "step": 11530 |
| }, |
| { |
| "epoch": 0.010629782365798998, |
| "grad_norm": 10.652593612670898, |
| "learning_rate": 1.9998836088886458e-07, |
| "loss": 1.0427, |
| "step": 11540 |
| }, |
| { |
| "epoch": 0.010638993615682706, |
| "grad_norm": 10.713086128234863, |
| "learning_rate": 1.9998833879321712e-07, |
| "loss": 1.0618, |
| "step": 11550 |
| }, |
| { |
| "epoch": 0.010648204865566413, |
| "grad_norm": 10.763566017150879, |
| "learning_rate": 1.999883166766176e-07, |
| "loss": 1.0297, |
| "step": 11560 |
| }, |
| { |
| "epoch": 0.010657416115450121, |
| "grad_norm": 11.8670654296875, |
| "learning_rate": 1.9998829453906607e-07, |
| "loss": 1.0804, |
| "step": 11570 |
| }, |
| { |
| "epoch": 0.010666627365333829, |
| "grad_norm": 9.109167098999023, |
| "learning_rate": 1.9998827238056252e-07, |
| "loss": 1.0369, |
| "step": 11580 |
| }, |
| { |
| "epoch": 0.010675838615217537, |
| "grad_norm": 10.6466703414917, |
| "learning_rate": 1.999882502011069e-07, |
| "loss": 1.0565, |
| "step": 11590 |
| }, |
| { |
| "epoch": 0.010685049865101246, |
| "grad_norm": 12.753141403198242, |
| "learning_rate": 1.9998822800069926e-07, |
| "loss": 1.0241, |
| "step": 11600 |
| }, |
| { |
| "epoch": 0.010694261114984954, |
| "grad_norm": 11.53568172454834, |
| "learning_rate": 1.9998820577933962e-07, |
| "loss": 1.0321, |
| "step": 11610 |
| }, |
| { |
| "epoch": 0.010703472364868662, |
| "grad_norm": 11.746357917785645, |
| "learning_rate": 1.9998818353702795e-07, |
| "loss": 1.0178, |
| "step": 11620 |
| }, |
| { |
| "epoch": 0.01071268361475237, |
| "grad_norm": 11.154654502868652, |
| "learning_rate": 1.9998816127376428e-07, |
| "loss": 1.0617, |
| "step": 11630 |
| }, |
| { |
| "epoch": 0.010721894864636077, |
| "grad_norm": 10.779613494873047, |
| "learning_rate": 1.999881389895486e-07, |
| "loss": 1.0601, |
| "step": 11640 |
| }, |
| { |
| "epoch": 0.010731106114519785, |
| "grad_norm": 10.55121898651123, |
| "learning_rate": 1.9998811668438092e-07, |
| "loss": 1.0181, |
| "step": 11650 |
| }, |
| { |
| "epoch": 0.010740317364403493, |
| "grad_norm": 11.700348854064941, |
| "learning_rate": 1.9998809435826124e-07, |
| "loss": 0.987, |
| "step": 11660 |
| }, |
| { |
| "epoch": 0.0107495286142872, |
| "grad_norm": 10.756016731262207, |
| "learning_rate": 1.999880720111896e-07, |
| "loss": 1.0636, |
| "step": 11670 |
| }, |
| { |
| "epoch": 0.01075873986417091, |
| "grad_norm": 10.366384506225586, |
| "learning_rate": 1.9998804964316593e-07, |
| "loss": 1.015, |
| "step": 11680 |
| }, |
| { |
| "epoch": 0.010767951114054618, |
| "grad_norm": 13.0294189453125, |
| "learning_rate": 1.999880272541903e-07, |
| "loss": 1.0869, |
| "step": 11690 |
| }, |
| { |
| "epoch": 0.010777162363938326, |
| "grad_norm": 17.25733184814453, |
| "learning_rate": 1.999880048442627e-07, |
| "loss": 1.0237, |
| "step": 11700 |
| }, |
| { |
| "epoch": 0.010786373613822033, |
| "grad_norm": 11.593828201293945, |
| "learning_rate": 1.999879824133831e-07, |
| "loss": 0.9993, |
| "step": 11710 |
| }, |
| { |
| "epoch": 0.010795584863705741, |
| "grad_norm": 22.637712478637695, |
| "learning_rate": 1.9998795996155152e-07, |
| "loss": 1.037, |
| "step": 11720 |
| }, |
| { |
| "epoch": 0.010804796113589449, |
| "grad_norm": 9.423585891723633, |
| "learning_rate": 1.9998793748876804e-07, |
| "loss": 1.0428, |
| "step": 11730 |
| }, |
| { |
| "epoch": 0.010814007363473157, |
| "grad_norm": 10.435938835144043, |
| "learning_rate": 1.9998791499503256e-07, |
| "loss": 1.0475, |
| "step": 11740 |
| }, |
| { |
| "epoch": 0.010823218613356864, |
| "grad_norm": 11.065678596496582, |
| "learning_rate": 1.9998789248034514e-07, |
| "loss": 1.0634, |
| "step": 11750 |
| }, |
| { |
| "epoch": 0.010832429863240574, |
| "grad_norm": 11.173529624938965, |
| "learning_rate": 1.9998786994470575e-07, |
| "loss": 1.0114, |
| "step": 11760 |
| }, |
| { |
| "epoch": 0.010841641113124282, |
| "grad_norm": 11.393064498901367, |
| "learning_rate": 1.9998784738811442e-07, |
| "loss": 1.0393, |
| "step": 11770 |
| }, |
| { |
| "epoch": 0.01085085236300799, |
| "grad_norm": 12.212664604187012, |
| "learning_rate": 1.9998782481057117e-07, |
| "loss": 0.9991, |
| "step": 11780 |
| }, |
| { |
| "epoch": 0.010860063612891697, |
| "grad_norm": 11.26770305633545, |
| "learning_rate": 1.9998780221207598e-07, |
| "loss": 1.0482, |
| "step": 11790 |
| }, |
| { |
| "epoch": 0.010869274862775405, |
| "grad_norm": 10.68546199798584, |
| "learning_rate": 1.9998777959262885e-07, |
| "loss": 1.0659, |
| "step": 11800 |
| }, |
| { |
| "epoch": 0.010878486112659113, |
| "grad_norm": 11.5813570022583, |
| "learning_rate": 1.999877569522298e-07, |
| "loss": 1.0303, |
| "step": 11810 |
| }, |
| { |
| "epoch": 0.01088769736254282, |
| "grad_norm": 10.201468467712402, |
| "learning_rate": 1.9998773429087884e-07, |
| "loss": 1.0408, |
| "step": 11820 |
| }, |
| { |
| "epoch": 0.010896908612426528, |
| "grad_norm": 10.659581184387207, |
| "learning_rate": 1.9998771160857596e-07, |
| "loss": 1.0589, |
| "step": 11830 |
| }, |
| { |
| "epoch": 0.010906119862310236, |
| "grad_norm": 10.418724060058594, |
| "learning_rate": 1.9998768890532113e-07, |
| "loss": 1.0123, |
| "step": 11840 |
| }, |
| { |
| "epoch": 0.010915331112193945, |
| "grad_norm": 10.496803283691406, |
| "learning_rate": 1.9998766618111445e-07, |
| "loss": 1.0355, |
| "step": 11850 |
| }, |
| { |
| "epoch": 0.010924542362077653, |
| "grad_norm": 13.37877368927002, |
| "learning_rate": 1.9998764343595585e-07, |
| "loss": 1.0602, |
| "step": 11860 |
| }, |
| { |
| "epoch": 0.010933753611961361, |
| "grad_norm": 9.89102554321289, |
| "learning_rate": 1.9998762066984533e-07, |
| "loss": 1.0322, |
| "step": 11870 |
| }, |
| { |
| "epoch": 0.010942964861845069, |
| "grad_norm": 10.393887519836426, |
| "learning_rate": 1.9998759788278295e-07, |
| "loss": 1.0646, |
| "step": 11880 |
| }, |
| { |
| "epoch": 0.010952176111728777, |
| "grad_norm": 11.223227500915527, |
| "learning_rate": 1.9998757507476865e-07, |
| "loss": 1.0463, |
| "step": 11890 |
| }, |
| { |
| "epoch": 0.010961387361612484, |
| "grad_norm": 11.836145401000977, |
| "learning_rate": 1.999875522458025e-07, |
| "loss": 1.0414, |
| "step": 11900 |
| }, |
| { |
| "epoch": 0.010970598611496192, |
| "grad_norm": 10.192471504211426, |
| "learning_rate": 1.9998752939588445e-07, |
| "loss": 1.0344, |
| "step": 11910 |
| }, |
| { |
| "epoch": 0.0109798098613799, |
| "grad_norm": 10.622711181640625, |
| "learning_rate": 1.9998750652501454e-07, |
| "loss": 1.0223, |
| "step": 11920 |
| }, |
| { |
| "epoch": 0.01098902111126361, |
| "grad_norm": 11.28220272064209, |
| "learning_rate": 1.9998748363319276e-07, |
| "loss": 1.0772, |
| "step": 11930 |
| }, |
| { |
| "epoch": 0.010998232361147317, |
| "grad_norm": 10.325407981872559, |
| "learning_rate": 1.999874607204191e-07, |
| "loss": 1.0982, |
| "step": 11940 |
| }, |
| { |
| "epoch": 0.011007443611031025, |
| "grad_norm": 10.70203685760498, |
| "learning_rate": 1.999874377866936e-07, |
| "loss": 1.0181, |
| "step": 11950 |
| }, |
| { |
| "epoch": 0.011016654860914733, |
| "grad_norm": 10.192893028259277, |
| "learning_rate": 1.9998741483201624e-07, |
| "loss": 1.0188, |
| "step": 11960 |
| }, |
| { |
| "epoch": 0.01102586611079844, |
| "grad_norm": 9.858963012695312, |
| "learning_rate": 1.9998739185638701e-07, |
| "loss": 1.0504, |
| "step": 11970 |
| }, |
| { |
| "epoch": 0.011035077360682148, |
| "grad_norm": 11.271580696105957, |
| "learning_rate": 1.9998736885980598e-07, |
| "loss": 1.0194, |
| "step": 11980 |
| }, |
| { |
| "epoch": 0.011044288610565856, |
| "grad_norm": 10.199090957641602, |
| "learning_rate": 1.9998734584227308e-07, |
| "loss": 1.0439, |
| "step": 11990 |
| }, |
| { |
| "epoch": 0.011053499860449564, |
| "grad_norm": 12.98397445678711, |
| "learning_rate": 1.9998732280378835e-07, |
| "loss": 1.0169, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.011062711110333271, |
| "grad_norm": 14.460063934326172, |
| "learning_rate": 1.9998729974435183e-07, |
| "loss": 1.0376, |
| "step": 12010 |
| }, |
| { |
| "epoch": 0.011071922360216981, |
| "grad_norm": 10.411107063293457, |
| "learning_rate": 1.9998727666396342e-07, |
| "loss": 1.0494, |
| "step": 12020 |
| }, |
| { |
| "epoch": 0.011081133610100689, |
| "grad_norm": 14.020751953125, |
| "learning_rate": 1.9998725356262323e-07, |
| "loss": 1.0056, |
| "step": 12030 |
| }, |
| { |
| "epoch": 0.011090344859984396, |
| "grad_norm": 10.027836799621582, |
| "learning_rate": 1.999872304403312e-07, |
| "loss": 1.0171, |
| "step": 12040 |
| }, |
| { |
| "epoch": 0.011099556109868104, |
| "grad_norm": 10.819367408752441, |
| "learning_rate": 1.999872072970874e-07, |
| "loss": 1.0527, |
| "step": 12050 |
| }, |
| { |
| "epoch": 0.011108767359751812, |
| "grad_norm": 11.160172462463379, |
| "learning_rate": 1.9998718413289178e-07, |
| "loss": 0.9938, |
| "step": 12060 |
| }, |
| { |
| "epoch": 0.01111797860963552, |
| "grad_norm": 10.848843574523926, |
| "learning_rate": 1.9998716094774432e-07, |
| "loss": 1.0421, |
| "step": 12070 |
| }, |
| { |
| "epoch": 0.011127189859519227, |
| "grad_norm": 10.68772029876709, |
| "learning_rate": 1.999871377416451e-07, |
| "loss": 1.037, |
| "step": 12080 |
| }, |
| { |
| "epoch": 0.011136401109402935, |
| "grad_norm": 11.4147367477417, |
| "learning_rate": 1.9998711451459408e-07, |
| "loss": 1.0423, |
| "step": 12090 |
| }, |
| { |
| "epoch": 0.011145612359286645, |
| "grad_norm": 11.017667770385742, |
| "learning_rate": 1.9998709126659127e-07, |
| "loss": 1.0759, |
| "step": 12100 |
| }, |
| { |
| "epoch": 0.011154823609170353, |
| "grad_norm": 10.885127067565918, |
| "learning_rate": 1.9998706799763668e-07, |
| "loss": 1.0682, |
| "step": 12110 |
| }, |
| { |
| "epoch": 0.01116403485905406, |
| "grad_norm": 18.020606994628906, |
| "learning_rate": 1.9998704470773034e-07, |
| "loss": 1.0154, |
| "step": 12120 |
| }, |
| { |
| "epoch": 0.011173246108937768, |
| "grad_norm": 15.228361129760742, |
| "learning_rate": 1.999870213968722e-07, |
| "loss": 1.1171, |
| "step": 12130 |
| }, |
| { |
| "epoch": 0.011182457358821476, |
| "grad_norm": 10.499837875366211, |
| "learning_rate": 1.9998699806506232e-07, |
| "loss": 1.0184, |
| "step": 12140 |
| }, |
| { |
| "epoch": 0.011191668608705184, |
| "grad_norm": 11.657089233398438, |
| "learning_rate": 1.9998697471230065e-07, |
| "loss": 1.0017, |
| "step": 12150 |
| }, |
| { |
| "epoch": 0.011200879858588891, |
| "grad_norm": 11.388848304748535, |
| "learning_rate": 1.9998695133858722e-07, |
| "loss": 1.0776, |
| "step": 12160 |
| }, |
| { |
| "epoch": 0.011210091108472599, |
| "grad_norm": 11.83633041381836, |
| "learning_rate": 1.9998692794392206e-07, |
| "loss": 1.0794, |
| "step": 12170 |
| }, |
| { |
| "epoch": 0.011219302358356307, |
| "grad_norm": 10.981163024902344, |
| "learning_rate": 1.9998690452830514e-07, |
| "loss": 1.0693, |
| "step": 12180 |
| }, |
| { |
| "epoch": 0.011228513608240016, |
| "grad_norm": 11.598734855651855, |
| "learning_rate": 1.999868810917365e-07, |
| "loss": 1.0091, |
| "step": 12190 |
| }, |
| { |
| "epoch": 0.011237724858123724, |
| "grad_norm": 16.457763671875, |
| "learning_rate": 1.9998685763421606e-07, |
| "loss": 1.0677, |
| "step": 12200 |
| }, |
| { |
| "epoch": 0.011246936108007432, |
| "grad_norm": 10.016337394714355, |
| "learning_rate": 1.9998683415574396e-07, |
| "loss": 1.0643, |
| "step": 12210 |
| }, |
| { |
| "epoch": 0.01125614735789114, |
| "grad_norm": 11.823240280151367, |
| "learning_rate": 1.999868106563201e-07, |
| "loss": 1.018, |
| "step": 12220 |
| }, |
| { |
| "epoch": 0.011265358607774847, |
| "grad_norm": 10.42904281616211, |
| "learning_rate": 1.9998678713594452e-07, |
| "loss": 1.031, |
| "step": 12230 |
| }, |
| { |
| "epoch": 0.011274569857658555, |
| "grad_norm": 10.224181175231934, |
| "learning_rate": 1.9998676359461722e-07, |
| "loss": 1.0676, |
| "step": 12240 |
| }, |
| { |
| "epoch": 0.011283781107542263, |
| "grad_norm": 10.682973861694336, |
| "learning_rate": 1.9998674003233825e-07, |
| "loss": 1.0349, |
| "step": 12250 |
| }, |
| { |
| "epoch": 0.01129299235742597, |
| "grad_norm": 9.559045791625977, |
| "learning_rate": 1.9998671644910751e-07, |
| "loss": 1.0423, |
| "step": 12260 |
| }, |
| { |
| "epoch": 0.01130220360730968, |
| "grad_norm": 10.4341402053833, |
| "learning_rate": 1.999866928449251e-07, |
| "loss": 1.0578, |
| "step": 12270 |
| }, |
| { |
| "epoch": 0.011311414857193388, |
| "grad_norm": 10.95155143737793, |
| "learning_rate": 1.99986669219791e-07, |
| "loss": 1.0875, |
| "step": 12280 |
| }, |
| { |
| "epoch": 0.011320626107077096, |
| "grad_norm": 11.40566635131836, |
| "learning_rate": 1.9998664557370517e-07, |
| "loss": 1.0225, |
| "step": 12290 |
| }, |
| { |
| "epoch": 0.011329837356960804, |
| "grad_norm": 12.1041841506958, |
| "learning_rate": 1.9998662190666767e-07, |
| "loss": 1.0277, |
| "step": 12300 |
| }, |
| { |
| "epoch": 0.011339048606844511, |
| "grad_norm": 11.432059288024902, |
| "learning_rate": 1.999865982186785e-07, |
| "loss": 1.0416, |
| "step": 12310 |
| }, |
| { |
| "epoch": 0.011348259856728219, |
| "grad_norm": 10.454280853271484, |
| "learning_rate": 1.9998657450973764e-07, |
| "loss": 1.0116, |
| "step": 12320 |
| }, |
| { |
| "epoch": 0.011357471106611927, |
| "grad_norm": 10.903828620910645, |
| "learning_rate": 1.9998655077984511e-07, |
| "loss": 1.0488, |
| "step": 12330 |
| }, |
| { |
| "epoch": 0.011366682356495635, |
| "grad_norm": 10.960176467895508, |
| "learning_rate": 1.999865270290009e-07, |
| "loss": 1.0135, |
| "step": 12340 |
| }, |
| { |
| "epoch": 0.011375893606379344, |
| "grad_norm": 9.959882736206055, |
| "learning_rate": 1.9998650325720505e-07, |
| "loss": 1.0219, |
| "step": 12350 |
| }, |
| { |
| "epoch": 0.011385104856263052, |
| "grad_norm": 11.631726264953613, |
| "learning_rate": 1.9998647946445754e-07, |
| "loss": 1.0042, |
| "step": 12360 |
| }, |
| { |
| "epoch": 0.01139431610614676, |
| "grad_norm": 10.551362037658691, |
| "learning_rate": 1.9998645565075838e-07, |
| "loss": 1.0837, |
| "step": 12370 |
| }, |
| { |
| "epoch": 0.011403527356030467, |
| "grad_norm": 11.355913162231445, |
| "learning_rate": 1.9998643181610757e-07, |
| "loss": 1.0372, |
| "step": 12380 |
| }, |
| { |
| "epoch": 0.011412738605914175, |
| "grad_norm": 16.121912002563477, |
| "learning_rate": 1.999864079605051e-07, |
| "loss": 1.0354, |
| "step": 12390 |
| }, |
| { |
| "epoch": 0.011421949855797883, |
| "grad_norm": 11.029335021972656, |
| "learning_rate": 1.99986384083951e-07, |
| "loss": 1.0477, |
| "step": 12400 |
| }, |
| { |
| "epoch": 0.01143116110568159, |
| "grad_norm": 11.397917747497559, |
| "learning_rate": 1.9998636018644528e-07, |
| "loss": 1.0635, |
| "step": 12410 |
| }, |
| { |
| "epoch": 0.011440372355565298, |
| "grad_norm": 11.816519737243652, |
| "learning_rate": 1.999863362679879e-07, |
| "loss": 1.0446, |
| "step": 12420 |
| }, |
| { |
| "epoch": 0.011449583605449006, |
| "grad_norm": 10.523292541503906, |
| "learning_rate": 1.9998631232857895e-07, |
| "loss": 1.023, |
| "step": 12430 |
| }, |
| { |
| "epoch": 0.011458794855332716, |
| "grad_norm": 10.206052780151367, |
| "learning_rate": 1.999862883682183e-07, |
| "loss": 1.0698, |
| "step": 12440 |
| }, |
| { |
| "epoch": 0.011468006105216423, |
| "grad_norm": 11.08065414428711, |
| "learning_rate": 1.999862643869061e-07, |
| "loss": 1.008, |
| "step": 12450 |
| }, |
| { |
| "epoch": 0.011477217355100131, |
| "grad_norm": 11.355118751525879, |
| "learning_rate": 1.999862403846423e-07, |
| "loss": 1.05, |
| "step": 12460 |
| }, |
| { |
| "epoch": 0.011486428604983839, |
| "grad_norm": 10.384430885314941, |
| "learning_rate": 1.9998621636142688e-07, |
| "loss": 0.978, |
| "step": 12470 |
| }, |
| { |
| "epoch": 0.011495639854867547, |
| "grad_norm": 12.98556900024414, |
| "learning_rate": 1.9998619231725983e-07, |
| "loss": 1.0776, |
| "step": 12480 |
| }, |
| { |
| "epoch": 0.011504851104751255, |
| "grad_norm": 11.405404090881348, |
| "learning_rate": 1.9998616825214122e-07, |
| "loss": 1.0319, |
| "step": 12490 |
| }, |
| { |
| "epoch": 0.011514062354634962, |
| "grad_norm": 10.554040908813477, |
| "learning_rate": 1.9998614416607102e-07, |
| "loss": 1.0371, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.01152327360451867, |
| "grad_norm": 10.350462913513184, |
| "learning_rate": 1.9998612005904925e-07, |
| "loss": 1.0045, |
| "step": 12510 |
| }, |
| { |
| "epoch": 0.01153248485440238, |
| "grad_norm": 9.919585227966309, |
| "learning_rate": 1.9998609593107588e-07, |
| "loss": 1.045, |
| "step": 12520 |
| }, |
| { |
| "epoch": 0.011541696104286087, |
| "grad_norm": 13.325592041015625, |
| "learning_rate": 1.9998607178215096e-07, |
| "loss": 1.0465, |
| "step": 12530 |
| }, |
| { |
| "epoch": 0.011550907354169795, |
| "grad_norm": 11.964670181274414, |
| "learning_rate": 1.9998604761227447e-07, |
| "loss": 1.0364, |
| "step": 12540 |
| }, |
| { |
| "epoch": 0.011560118604053503, |
| "grad_norm": 10.877337455749512, |
| "learning_rate": 1.9998602342144638e-07, |
| "loss": 1.0634, |
| "step": 12550 |
| }, |
| { |
| "epoch": 0.01156932985393721, |
| "grad_norm": 10.8394193649292, |
| "learning_rate": 1.9998599920966678e-07, |
| "loss": 0.9854, |
| "step": 12560 |
| }, |
| { |
| "epoch": 0.011578541103820918, |
| "grad_norm": 11.480612754821777, |
| "learning_rate": 1.999859749769356e-07, |
| "loss": 1.0722, |
| "step": 12570 |
| }, |
| { |
| "epoch": 0.011587752353704626, |
| "grad_norm": 13.105162620544434, |
| "learning_rate": 1.9998595072325285e-07, |
| "loss": 1.0402, |
| "step": 12580 |
| }, |
| { |
| "epoch": 0.011596963603588334, |
| "grad_norm": 10.188638687133789, |
| "learning_rate": 1.999859264486186e-07, |
| "loss": 1.0322, |
| "step": 12590 |
| }, |
| { |
| "epoch": 0.011606174853472042, |
| "grad_norm": 9.734145164489746, |
| "learning_rate": 1.999859021530328e-07, |
| "loss": 1.0058, |
| "step": 12600 |
| }, |
| { |
| "epoch": 0.011615386103355751, |
| "grad_norm": 9.922042846679688, |
| "learning_rate": 1.9998587783649547e-07, |
| "loss": 1.0173, |
| "step": 12610 |
| }, |
| { |
| "epoch": 0.011624597353239459, |
| "grad_norm": 11.062028884887695, |
| "learning_rate": 1.9998585349900662e-07, |
| "loss": 1.0429, |
| "step": 12620 |
| }, |
| { |
| "epoch": 0.011633808603123167, |
| "grad_norm": 11.106369972229004, |
| "learning_rate": 1.9998582914056622e-07, |
| "loss": 1.0435, |
| "step": 12630 |
| }, |
| { |
| "epoch": 0.011643019853006874, |
| "grad_norm": 10.368574142456055, |
| "learning_rate": 1.9998580476117436e-07, |
| "loss": 1.0014, |
| "step": 12640 |
| }, |
| { |
| "epoch": 0.011652231102890582, |
| "grad_norm": 10.339253425598145, |
| "learning_rate": 1.9998578036083092e-07, |
| "loss": 1.0195, |
| "step": 12650 |
| }, |
| { |
| "epoch": 0.01166144235277429, |
| "grad_norm": 12.082822799682617, |
| "learning_rate": 1.9998575593953603e-07, |
| "loss": 1.0354, |
| "step": 12660 |
| }, |
| { |
| "epoch": 0.011670653602657998, |
| "grad_norm": 10.674452781677246, |
| "learning_rate": 1.9998573149728958e-07, |
| "loss": 1.0124, |
| "step": 12670 |
| }, |
| { |
| "epoch": 0.011679864852541706, |
| "grad_norm": 11.495284080505371, |
| "learning_rate": 1.9998570703409167e-07, |
| "loss": 1.027, |
| "step": 12680 |
| }, |
| { |
| "epoch": 0.011689076102425415, |
| "grad_norm": 10.515803337097168, |
| "learning_rate": 1.9998568254994227e-07, |
| "loss": 1.0523, |
| "step": 12690 |
| }, |
| { |
| "epoch": 0.011698287352309123, |
| "grad_norm": 11.500944137573242, |
| "learning_rate": 1.999856580448414e-07, |
| "loss": 1.0528, |
| "step": 12700 |
| }, |
| { |
| "epoch": 0.01170749860219283, |
| "grad_norm": 10.046072959899902, |
| "learning_rate": 1.99985633518789e-07, |
| "loss": 1.0172, |
| "step": 12710 |
| }, |
| { |
| "epoch": 0.011716709852076538, |
| "grad_norm": 11.040961265563965, |
| "learning_rate": 1.9998560897178516e-07, |
| "loss": 1.0304, |
| "step": 12720 |
| }, |
| { |
| "epoch": 0.011725921101960246, |
| "grad_norm": 12.23554515838623, |
| "learning_rate": 1.9998558440382984e-07, |
| "loss": 1.0828, |
| "step": 12730 |
| }, |
| { |
| "epoch": 0.011735132351843954, |
| "grad_norm": 9.606940269470215, |
| "learning_rate": 1.9998555981492306e-07, |
| "loss": 1.0497, |
| "step": 12740 |
| }, |
| { |
| "epoch": 0.011744343601727662, |
| "grad_norm": 10.060453414916992, |
| "learning_rate": 1.9998553520506483e-07, |
| "loss": 1.0314, |
| "step": 12750 |
| }, |
| { |
| "epoch": 0.01175355485161137, |
| "grad_norm": 11.131389617919922, |
| "learning_rate": 1.9998551057425512e-07, |
| "loss": 1.0006, |
| "step": 12760 |
| }, |
| { |
| "epoch": 0.011762766101495077, |
| "grad_norm": 10.40088176727295, |
| "learning_rate": 1.99985485922494e-07, |
| "loss": 1.0349, |
| "step": 12770 |
| }, |
| { |
| "epoch": 0.011771977351378787, |
| "grad_norm": 9.745380401611328, |
| "learning_rate": 1.9998546124978137e-07, |
| "loss": 1.0703, |
| "step": 12780 |
| }, |
| { |
| "epoch": 0.011781188601262494, |
| "grad_norm": 11.209060668945312, |
| "learning_rate": 1.9998543655611736e-07, |
| "loss": 1.0428, |
| "step": 12790 |
| }, |
| { |
| "epoch": 0.011790399851146202, |
| "grad_norm": 11.013096809387207, |
| "learning_rate": 1.999854118415019e-07, |
| "loss": 1.0316, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.01179961110102991, |
| "grad_norm": 12.18641471862793, |
| "learning_rate": 1.99985387105935e-07, |
| "loss": 1.0093, |
| "step": 12810 |
| }, |
| { |
| "epoch": 0.011808822350913618, |
| "grad_norm": 10.719549179077148, |
| "learning_rate": 1.9998536234941667e-07, |
| "loss": 1.0609, |
| "step": 12820 |
| }, |
| { |
| "epoch": 0.011818033600797325, |
| "grad_norm": 12.071653366088867, |
| "learning_rate": 1.9998533757194696e-07, |
| "loss": 1.0347, |
| "step": 12830 |
| }, |
| { |
| "epoch": 0.011827244850681033, |
| "grad_norm": 10.861738204956055, |
| "learning_rate": 1.9998531277352578e-07, |
| "loss": 1.0716, |
| "step": 12840 |
| }, |
| { |
| "epoch": 0.011836456100564741, |
| "grad_norm": 18.263153076171875, |
| "learning_rate": 1.9998528795415322e-07, |
| "loss": 1.0903, |
| "step": 12850 |
| }, |
| { |
| "epoch": 0.01184566735044845, |
| "grad_norm": 13.22293472290039, |
| "learning_rate": 1.9998526311382927e-07, |
| "loss": 1.0514, |
| "step": 12860 |
| }, |
| { |
| "epoch": 0.011854878600332158, |
| "grad_norm": 11.849961280822754, |
| "learning_rate": 1.999852382525539e-07, |
| "loss": 1.0351, |
| "step": 12870 |
| }, |
| { |
| "epoch": 0.011864089850215866, |
| "grad_norm": 10.727474212646484, |
| "learning_rate": 1.9998521337032715e-07, |
| "loss": 1.0244, |
| "step": 12880 |
| }, |
| { |
| "epoch": 0.011873301100099574, |
| "grad_norm": 10.668787956237793, |
| "learning_rate": 1.9998518846714901e-07, |
| "loss": 1.0131, |
| "step": 12890 |
| }, |
| { |
| "epoch": 0.011882512349983282, |
| "grad_norm": 11.884491920471191, |
| "learning_rate": 1.999851635430195e-07, |
| "loss": 1.0063, |
| "step": 12900 |
| }, |
| { |
| "epoch": 0.01189172359986699, |
| "grad_norm": 12.002286911010742, |
| "learning_rate": 1.9998513859793858e-07, |
| "loss": 1.029, |
| "step": 12910 |
| }, |
| { |
| "epoch": 0.011900934849750697, |
| "grad_norm": 10.8087739944458, |
| "learning_rate": 1.999851136319063e-07, |
| "loss": 1.0079, |
| "step": 12920 |
| }, |
| { |
| "epoch": 0.011910146099634405, |
| "grad_norm": 11.131049156188965, |
| "learning_rate": 1.9998508864492268e-07, |
| "loss": 1.0363, |
| "step": 12930 |
| }, |
| { |
| "epoch": 0.011919357349518114, |
| "grad_norm": 11.638226509094238, |
| "learning_rate": 1.9998506363698767e-07, |
| "loss": 1.0377, |
| "step": 12940 |
| }, |
| { |
| "epoch": 0.011928568599401822, |
| "grad_norm": 10.25147819519043, |
| "learning_rate": 1.9998503860810132e-07, |
| "loss": 1.0531, |
| "step": 12950 |
| }, |
| { |
| "epoch": 0.01193777984928553, |
| "grad_norm": 11.84463119506836, |
| "learning_rate": 1.999850135582636e-07, |
| "loss": 1.0127, |
| "step": 12960 |
| }, |
| { |
| "epoch": 0.011946991099169238, |
| "grad_norm": 11.547938346862793, |
| "learning_rate": 1.9998498848747454e-07, |
| "loss": 1.0219, |
| "step": 12970 |
| }, |
| { |
| "epoch": 0.011956202349052945, |
| "grad_norm": 10.293601989746094, |
| "learning_rate": 1.9998496339573414e-07, |
| "loss": 0.9894, |
| "step": 12980 |
| }, |
| { |
| "epoch": 0.011965413598936653, |
| "grad_norm": 10.263101577758789, |
| "learning_rate": 1.9998493828304243e-07, |
| "loss": 1.0775, |
| "step": 12990 |
| }, |
| { |
| "epoch": 0.011974624848820361, |
| "grad_norm": 9.939203262329102, |
| "learning_rate": 1.9998491314939936e-07, |
| "loss": 1.0128, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.011983836098704069, |
| "grad_norm": 12.134598731994629, |
| "learning_rate": 1.9998488799480498e-07, |
| "loss": 1.0234, |
| "step": 13010 |
| }, |
| { |
| "epoch": 0.011993047348587776, |
| "grad_norm": 11.045609474182129, |
| "learning_rate": 1.9998486281925927e-07, |
| "loss": 1.0378, |
| "step": 13020 |
| }, |
| { |
| "epoch": 0.012002258598471486, |
| "grad_norm": 10.733898162841797, |
| "learning_rate": 1.9998483762276223e-07, |
| "loss": 1.0611, |
| "step": 13030 |
| }, |
| { |
| "epoch": 0.012011469848355194, |
| "grad_norm": 10.85460090637207, |
| "learning_rate": 1.9998481240531393e-07, |
| "loss": 1.0559, |
| "step": 13040 |
| }, |
| { |
| "epoch": 0.012020681098238901, |
| "grad_norm": 11.535517692565918, |
| "learning_rate": 1.999847871669143e-07, |
| "loss": 1.0258, |
| "step": 13050 |
| }, |
| { |
| "epoch": 0.01202989234812261, |
| "grad_norm": 9.142783164978027, |
| "learning_rate": 1.9998476190756336e-07, |
| "loss": 1.0008, |
| "step": 13060 |
| }, |
| { |
| "epoch": 0.012039103598006317, |
| "grad_norm": 9.973493576049805, |
| "learning_rate": 1.9998473662726114e-07, |
| "loss": 1.0261, |
| "step": 13070 |
| }, |
| { |
| "epoch": 0.012048314847890025, |
| "grad_norm": 11.820738792419434, |
| "learning_rate": 1.9998471132600764e-07, |
| "loss": 1.0288, |
| "step": 13080 |
| }, |
| { |
| "epoch": 0.012057526097773733, |
| "grad_norm": 13.60704231262207, |
| "learning_rate": 1.9998468600380286e-07, |
| "loss": 1.0377, |
| "step": 13090 |
| }, |
| { |
| "epoch": 0.01206673734765744, |
| "grad_norm": 11.284205436706543, |
| "learning_rate": 1.9998466066064677e-07, |
| "loss": 1.0261, |
| "step": 13100 |
| }, |
| { |
| "epoch": 0.01207594859754115, |
| "grad_norm": 19.59893798828125, |
| "learning_rate": 1.9998463529653943e-07, |
| "loss": 1.0112, |
| "step": 13110 |
| }, |
| { |
| "epoch": 0.012085159847424858, |
| "grad_norm": 10.11678695678711, |
| "learning_rate": 1.999846099114808e-07, |
| "loss": 1.0143, |
| "step": 13120 |
| }, |
| { |
| "epoch": 0.012094371097308565, |
| "grad_norm": 9.816136360168457, |
| "learning_rate": 1.9998458450547096e-07, |
| "loss": 1.0236, |
| "step": 13130 |
| }, |
| { |
| "epoch": 0.012103582347192273, |
| "grad_norm": 10.222819328308105, |
| "learning_rate": 1.999845590785098e-07, |
| "loss": 1.0507, |
| "step": 13140 |
| }, |
| { |
| "epoch": 0.01211279359707598, |
| "grad_norm": 11.43911361694336, |
| "learning_rate": 1.9998453363059747e-07, |
| "loss": 1.041, |
| "step": 13150 |
| }, |
| { |
| "epoch": 0.012122004846959689, |
| "grad_norm": 10.485759735107422, |
| "learning_rate": 1.999845081617338e-07, |
| "loss": 1.0063, |
| "step": 13160 |
| }, |
| { |
| "epoch": 0.012131216096843396, |
| "grad_norm": 10.929678916931152, |
| "learning_rate": 1.9998448267191897e-07, |
| "loss": 1.0372, |
| "step": 13170 |
| }, |
| { |
| "epoch": 0.012140427346727104, |
| "grad_norm": 11.233808517456055, |
| "learning_rate": 1.9998445716115285e-07, |
| "loss": 1.0265, |
| "step": 13180 |
| }, |
| { |
| "epoch": 0.012149638596610812, |
| "grad_norm": 11.181950569152832, |
| "learning_rate": 1.9998443162943556e-07, |
| "loss": 1.0215, |
| "step": 13190 |
| }, |
| { |
| "epoch": 0.012158849846494521, |
| "grad_norm": 10.015949249267578, |
| "learning_rate": 1.9998440607676698e-07, |
| "loss": 1.0205, |
| "step": 13200 |
| }, |
| { |
| "epoch": 0.01216806109637823, |
| "grad_norm": 10.545913696289062, |
| "learning_rate": 1.9998438050314723e-07, |
| "loss": 1.0381, |
| "step": 13210 |
| }, |
| { |
| "epoch": 0.012177272346261937, |
| "grad_norm": 10.960654258728027, |
| "learning_rate": 1.9998435490857626e-07, |
| "loss": 1.0422, |
| "step": 13220 |
| }, |
| { |
| "epoch": 0.012186483596145645, |
| "grad_norm": 10.414505004882812, |
| "learning_rate": 1.9998432929305408e-07, |
| "loss": 0.9684, |
| "step": 13230 |
| }, |
| { |
| "epoch": 0.012195694846029352, |
| "grad_norm": 9.997944831848145, |
| "learning_rate": 1.9998430365658067e-07, |
| "loss": 1.0486, |
| "step": 13240 |
| }, |
| { |
| "epoch": 0.01220490609591306, |
| "grad_norm": 11.961804389953613, |
| "learning_rate": 1.999842779991561e-07, |
| "loss": 1.0382, |
| "step": 13250 |
| }, |
| { |
| "epoch": 0.012214117345796768, |
| "grad_norm": 18.367921829223633, |
| "learning_rate": 1.999842523207803e-07, |
| "loss": 1.0724, |
| "step": 13260 |
| }, |
| { |
| "epoch": 0.012223328595680476, |
| "grad_norm": 12.685203552246094, |
| "learning_rate": 1.9998422662145335e-07, |
| "loss": 1.0054, |
| "step": 13270 |
| }, |
| { |
| "epoch": 0.012232539845564185, |
| "grad_norm": 11.278654098510742, |
| "learning_rate": 1.999842009011752e-07, |
| "loss": 1.0159, |
| "step": 13280 |
| }, |
| { |
| "epoch": 0.012241751095447893, |
| "grad_norm": 10.944748878479004, |
| "learning_rate": 1.999841751599459e-07, |
| "loss": 1.0072, |
| "step": 13290 |
| }, |
| { |
| "epoch": 0.0122509623453316, |
| "grad_norm": 15.634936332702637, |
| "learning_rate": 1.999841493977654e-07, |
| "loss": 1.0379, |
| "step": 13300 |
| }, |
| { |
| "epoch": 0.012260173595215309, |
| "grad_norm": 12.31917667388916, |
| "learning_rate": 1.9998412361463376e-07, |
| "loss": 1.0181, |
| "step": 13310 |
| }, |
| { |
| "epoch": 0.012269384845099016, |
| "grad_norm": 9.977714538574219, |
| "learning_rate": 1.9998409781055093e-07, |
| "loss": 1.0285, |
| "step": 13320 |
| }, |
| { |
| "epoch": 0.012278596094982724, |
| "grad_norm": 10.614492416381836, |
| "learning_rate": 1.9998407198551695e-07, |
| "loss": 1.0229, |
| "step": 13330 |
| }, |
| { |
| "epoch": 0.012287807344866432, |
| "grad_norm": 10.815546035766602, |
| "learning_rate": 1.9998404613953182e-07, |
| "loss": 0.9933, |
| "step": 13340 |
| }, |
| { |
| "epoch": 0.01229701859475014, |
| "grad_norm": 11.779792785644531, |
| "learning_rate": 1.9998402027259558e-07, |
| "loss": 1.0083, |
| "step": 13350 |
| }, |
| { |
| "epoch": 0.012306229844633847, |
| "grad_norm": 9.693839073181152, |
| "learning_rate": 1.9998399438470818e-07, |
| "loss": 1.0098, |
| "step": 13360 |
| }, |
| { |
| "epoch": 0.012315441094517557, |
| "grad_norm": 22.561391830444336, |
| "learning_rate": 1.9998396847586964e-07, |
| "loss": 1.0529, |
| "step": 13370 |
| }, |
| { |
| "epoch": 0.012324652344401265, |
| "grad_norm": 10.351146697998047, |
| "learning_rate": 1.9998394254608e-07, |
| "loss": 1.0249, |
| "step": 13380 |
| }, |
| { |
| "epoch": 0.012333863594284972, |
| "grad_norm": 9.872284889221191, |
| "learning_rate": 1.999839165953392e-07, |
| "loss": 1.0067, |
| "step": 13390 |
| }, |
| { |
| "epoch": 0.01234307484416868, |
| "grad_norm": 11.411335945129395, |
| "learning_rate": 1.9998389062364733e-07, |
| "loss": 1.0188, |
| "step": 13400 |
| }, |
| { |
| "epoch": 0.012352286094052388, |
| "grad_norm": 11.088921546936035, |
| "learning_rate": 1.9998386463100432e-07, |
| "loss": 1.0275, |
| "step": 13410 |
| }, |
| { |
| "epoch": 0.012361497343936096, |
| "grad_norm": 11.77889347076416, |
| "learning_rate": 1.9998383861741022e-07, |
| "loss": 1.0351, |
| "step": 13420 |
| }, |
| { |
| "epoch": 0.012370708593819803, |
| "grad_norm": 10.038293838500977, |
| "learning_rate": 1.99983812582865e-07, |
| "loss": 1.0043, |
| "step": 13430 |
| }, |
| { |
| "epoch": 0.012379919843703511, |
| "grad_norm": 12.374161720275879, |
| "learning_rate": 1.999837865273687e-07, |
| "loss": 1.0198, |
| "step": 13440 |
| }, |
| { |
| "epoch": 0.01238913109358722, |
| "grad_norm": 10.037971496582031, |
| "learning_rate": 1.999837604509213e-07, |
| "loss": 1.0284, |
| "step": 13450 |
| }, |
| { |
| "epoch": 0.012398342343470928, |
| "grad_norm": 10.498419761657715, |
| "learning_rate": 1.9998373435352285e-07, |
| "loss": 1.0631, |
| "step": 13460 |
| }, |
| { |
| "epoch": 0.012407553593354636, |
| "grad_norm": 9.958725929260254, |
| "learning_rate": 1.9998370823517327e-07, |
| "loss": 1.0012, |
| "step": 13470 |
| }, |
| { |
| "epoch": 0.012416764843238344, |
| "grad_norm": 11.818429946899414, |
| "learning_rate": 1.9998368209587265e-07, |
| "loss": 1.0254, |
| "step": 13480 |
| }, |
| { |
| "epoch": 0.012425976093122052, |
| "grad_norm": 12.042304992675781, |
| "learning_rate": 1.99983655935621e-07, |
| "loss": 1.0064, |
| "step": 13490 |
| }, |
| { |
| "epoch": 0.01243518734300576, |
| "grad_norm": 11.252850532531738, |
| "learning_rate": 1.9998362975441824e-07, |
| "loss": 1.0248, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.012444398592889467, |
| "grad_norm": 9.87796401977539, |
| "learning_rate": 1.999836035522644e-07, |
| "loss": 1.0164, |
| "step": 13510 |
| }, |
| { |
| "epoch": 0.012453609842773175, |
| "grad_norm": 10.7407808303833, |
| "learning_rate": 1.9998357732915957e-07, |
| "loss": 1.0893, |
| "step": 13520 |
| }, |
| { |
| "epoch": 0.012462821092656885, |
| "grad_norm": 12.11376667022705, |
| "learning_rate": 1.9998355108510366e-07, |
| "loss": 1.0221, |
| "step": 13530 |
| }, |
| { |
| "epoch": 0.012472032342540592, |
| "grad_norm": 11.665279388427734, |
| "learning_rate": 1.999835248200967e-07, |
| "loss": 1.0469, |
| "step": 13540 |
| }, |
| { |
| "epoch": 0.0124812435924243, |
| "grad_norm": 9.829465866088867, |
| "learning_rate": 1.9998349853413874e-07, |
| "loss": 1.004, |
| "step": 13550 |
| }, |
| { |
| "epoch": 0.012490454842308008, |
| "grad_norm": 10.644261360168457, |
| "learning_rate": 1.9998347222722973e-07, |
| "loss": 1.0318, |
| "step": 13560 |
| }, |
| { |
| "epoch": 0.012499666092191716, |
| "grad_norm": 11.00671100616455, |
| "learning_rate": 1.999834458993697e-07, |
| "loss": 1.0613, |
| "step": 13570 |
| }, |
| { |
| "epoch": 0.012508877342075423, |
| "grad_norm": 10.036681175231934, |
| "learning_rate": 1.9998341955055867e-07, |
| "loss": 0.9904, |
| "step": 13580 |
| }, |
| { |
| "epoch": 0.012518088591959131, |
| "grad_norm": 10.51473617553711, |
| "learning_rate": 1.999833931807966e-07, |
| "loss": 1.0255, |
| "step": 13590 |
| }, |
| { |
| "epoch": 0.012527299841842839, |
| "grad_norm": 10.107020378112793, |
| "learning_rate": 1.9998336679008354e-07, |
| "loss": 1.0542, |
| "step": 13600 |
| }, |
| { |
| "epoch": 0.012536511091726547, |
| "grad_norm": 10.273112297058105, |
| "learning_rate": 1.9998334037841947e-07, |
| "loss": 1.0328, |
| "step": 13610 |
| }, |
| { |
| "epoch": 0.012545722341610256, |
| "grad_norm": 14.128885269165039, |
| "learning_rate": 1.9998331394580442e-07, |
| "loss": 1.0338, |
| "step": 13620 |
| }, |
| { |
| "epoch": 0.012554933591493964, |
| "grad_norm": 18.069040298461914, |
| "learning_rate": 1.9998328749223838e-07, |
| "loss": 1.0476, |
| "step": 13630 |
| }, |
| { |
| "epoch": 0.012564144841377672, |
| "grad_norm": 11.211135864257812, |
| "learning_rate": 1.9998326101772135e-07, |
| "loss": 1.0302, |
| "step": 13640 |
| }, |
| { |
| "epoch": 0.01257335609126138, |
| "grad_norm": 10.113682746887207, |
| "learning_rate": 1.9998323452225333e-07, |
| "loss": 1.0237, |
| "step": 13650 |
| }, |
| { |
| "epoch": 0.012582567341145087, |
| "grad_norm": 11.019082069396973, |
| "learning_rate": 1.9998320800583435e-07, |
| "loss": 1.0692, |
| "step": 13660 |
| }, |
| { |
| "epoch": 0.012591778591028795, |
| "grad_norm": 12.991950035095215, |
| "learning_rate": 1.999831814684644e-07, |
| "loss": 1.0609, |
| "step": 13670 |
| }, |
| { |
| "epoch": 0.012600989840912503, |
| "grad_norm": 10.324051856994629, |
| "learning_rate": 1.999831549101435e-07, |
| "loss": 1.0288, |
| "step": 13680 |
| }, |
| { |
| "epoch": 0.01261020109079621, |
| "grad_norm": 12.9264554977417, |
| "learning_rate": 1.999831283308716e-07, |
| "loss": 1.011, |
| "step": 13690 |
| }, |
| { |
| "epoch": 0.01261941234067992, |
| "grad_norm": 10.522852897644043, |
| "learning_rate": 1.9998310173064881e-07, |
| "loss": 0.9877, |
| "step": 13700 |
| }, |
| { |
| "epoch": 0.012628623590563628, |
| "grad_norm": 11.253312110900879, |
| "learning_rate": 1.9998307510947502e-07, |
| "loss": 1.0459, |
| "step": 13710 |
| }, |
| { |
| "epoch": 0.012637834840447336, |
| "grad_norm": 10.876058578491211, |
| "learning_rate": 1.9998304846735034e-07, |
| "loss": 1.0422, |
| "step": 13720 |
| }, |
| { |
| "epoch": 0.012647046090331043, |
| "grad_norm": 10.079529762268066, |
| "learning_rate": 1.999830218042747e-07, |
| "loss": 1.0558, |
| "step": 13730 |
| }, |
| { |
| "epoch": 0.012656257340214751, |
| "grad_norm": 11.479740142822266, |
| "learning_rate": 1.999829951202481e-07, |
| "loss": 1.0369, |
| "step": 13740 |
| }, |
| { |
| "epoch": 0.012665468590098459, |
| "grad_norm": 12.495981216430664, |
| "learning_rate": 1.9998296841527065e-07, |
| "loss": 1.1261, |
| "step": 13750 |
| }, |
| { |
| "epoch": 0.012674679839982167, |
| "grad_norm": 16.66766357421875, |
| "learning_rate": 1.9998294168934222e-07, |
| "loss": 1.0237, |
| "step": 13760 |
| }, |
| { |
| "epoch": 0.012683891089865874, |
| "grad_norm": 10.03262996673584, |
| "learning_rate": 1.999829149424629e-07, |
| "loss": 1.0349, |
| "step": 13770 |
| }, |
| { |
| "epoch": 0.012693102339749582, |
| "grad_norm": 11.309409141540527, |
| "learning_rate": 1.999828881746327e-07, |
| "loss": 1.0292, |
| "step": 13780 |
| }, |
| { |
| "epoch": 0.012702313589633292, |
| "grad_norm": 9.323826789855957, |
| "learning_rate": 1.9998286138585156e-07, |
| "loss": 1.0257, |
| "step": 13790 |
| }, |
| { |
| "epoch": 0.012711524839517, |
| "grad_norm": 10.989248275756836, |
| "learning_rate": 1.9998283457611955e-07, |
| "loss": 1.0654, |
| "step": 13800 |
| }, |
| { |
| "epoch": 0.012720736089400707, |
| "grad_norm": 10.550274848937988, |
| "learning_rate": 1.9998280774543663e-07, |
| "loss": 0.9941, |
| "step": 13810 |
| }, |
| { |
| "epoch": 0.012729947339284415, |
| "grad_norm": 11.93160343170166, |
| "learning_rate": 1.9998278089380282e-07, |
| "loss": 1.0731, |
| "step": 13820 |
| }, |
| { |
| "epoch": 0.012739158589168123, |
| "grad_norm": 10.182496070861816, |
| "learning_rate": 1.9998275402121818e-07, |
| "loss": 0.9954, |
| "step": 13830 |
| }, |
| { |
| "epoch": 0.01274836983905183, |
| "grad_norm": 10.198827743530273, |
| "learning_rate": 1.9998272712768263e-07, |
| "loss": 1.0403, |
| "step": 13840 |
| }, |
| { |
| "epoch": 0.012757581088935538, |
| "grad_norm": 10.947816848754883, |
| "learning_rate": 1.9998270021319623e-07, |
| "loss": 1.0213, |
| "step": 13850 |
| }, |
| { |
| "epoch": 0.012766792338819246, |
| "grad_norm": 11.719832420349121, |
| "learning_rate": 1.9998267327775894e-07, |
| "loss": 1.0072, |
| "step": 13860 |
| }, |
| { |
| "epoch": 0.012776003588702955, |
| "grad_norm": 15.481405258178711, |
| "learning_rate": 1.9998264632137082e-07, |
| "loss": 0.9902, |
| "step": 13870 |
| }, |
| { |
| "epoch": 0.012785214838586663, |
| "grad_norm": 12.571460723876953, |
| "learning_rate": 1.9998261934403185e-07, |
| "loss": 1.028, |
| "step": 13880 |
| }, |
| { |
| "epoch": 0.012794426088470371, |
| "grad_norm": 10.477856636047363, |
| "learning_rate": 1.9998259234574201e-07, |
| "loss": 1.0406, |
| "step": 13890 |
| }, |
| { |
| "epoch": 0.012803637338354079, |
| "grad_norm": 10.395843505859375, |
| "learning_rate": 1.9998256532650138e-07, |
| "loss": 1.0299, |
| "step": 13900 |
| }, |
| { |
| "epoch": 0.012812848588237787, |
| "grad_norm": 10.972112655639648, |
| "learning_rate": 1.9998253828630988e-07, |
| "loss": 1.0043, |
| "step": 13910 |
| }, |
| { |
| "epoch": 0.012822059838121494, |
| "grad_norm": 10.709739685058594, |
| "learning_rate": 1.9998251122516759e-07, |
| "loss": 1.0264, |
| "step": 13920 |
| }, |
| { |
| "epoch": 0.012831271088005202, |
| "grad_norm": 27.398651123046875, |
| "learning_rate": 1.9998248414307443e-07, |
| "loss": 1.0046, |
| "step": 13930 |
| }, |
| { |
| "epoch": 0.01284048233788891, |
| "grad_norm": 9.787290573120117, |
| "learning_rate": 1.999824570400305e-07, |
| "loss": 1.0026, |
| "step": 13940 |
| }, |
| { |
| "epoch": 0.012849693587772618, |
| "grad_norm": 11.304927825927734, |
| "learning_rate": 1.999824299160357e-07, |
| "loss": 1.0559, |
| "step": 13950 |
| }, |
| { |
| "epoch": 0.012858904837656327, |
| "grad_norm": 9.933921813964844, |
| "learning_rate": 1.9998240277109014e-07, |
| "loss": 1.0568, |
| "step": 13960 |
| }, |
| { |
| "epoch": 0.012868116087540035, |
| "grad_norm": 10.142529487609863, |
| "learning_rate": 1.9998237560519377e-07, |
| "loss": 1.0349, |
| "step": 13970 |
| }, |
| { |
| "epoch": 0.012877327337423743, |
| "grad_norm": 12.451525688171387, |
| "learning_rate": 1.9998234841834662e-07, |
| "loss": 1.0223, |
| "step": 13980 |
| }, |
| { |
| "epoch": 0.01288653858730745, |
| "grad_norm": 10.039769172668457, |
| "learning_rate": 1.9998232121054867e-07, |
| "loss": 0.9899, |
| "step": 13990 |
| }, |
| { |
| "epoch": 0.012895749837191158, |
| "grad_norm": 11.092480659484863, |
| "learning_rate": 1.9998229398179994e-07, |
| "loss": 1.0509, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.012904961087074866, |
| "grad_norm": 10.24864673614502, |
| "learning_rate": 1.9998226673210043e-07, |
| "loss": 1.1199, |
| "step": 14010 |
| }, |
| { |
| "epoch": 0.012914172336958574, |
| "grad_norm": 9.621000289916992, |
| "learning_rate": 1.9998223946145017e-07, |
| "loss": 1.0175, |
| "step": 14020 |
| }, |
| { |
| "epoch": 0.012923383586842281, |
| "grad_norm": 11.19433307647705, |
| "learning_rate": 1.999822121698491e-07, |
| "loss": 1.066, |
| "step": 14030 |
| }, |
| { |
| "epoch": 0.012932594836725991, |
| "grad_norm": 10.21904182434082, |
| "learning_rate": 1.9998218485729735e-07, |
| "loss": 1.045, |
| "step": 14040 |
| }, |
| { |
| "epoch": 0.012941806086609699, |
| "grad_norm": 11.708386421203613, |
| "learning_rate": 1.9998215752379478e-07, |
| "loss": 1.047, |
| "step": 14050 |
| }, |
| { |
| "epoch": 0.012951017336493406, |
| "grad_norm": 11.216705322265625, |
| "learning_rate": 1.999821301693415e-07, |
| "loss": 1.0468, |
| "step": 14060 |
| }, |
| { |
| "epoch": 0.012960228586377114, |
| "grad_norm": 10.418187141418457, |
| "learning_rate": 1.9998210279393742e-07, |
| "loss": 1.0365, |
| "step": 14070 |
| }, |
| { |
| "epoch": 0.012969439836260822, |
| "grad_norm": 9.802369117736816, |
| "learning_rate": 1.9998207539758268e-07, |
| "loss": 1.042, |
| "step": 14080 |
| }, |
| { |
| "epoch": 0.01297865108614453, |
| "grad_norm": 11.526812553405762, |
| "learning_rate": 1.9998204798027717e-07, |
| "loss": 1.0519, |
| "step": 14090 |
| }, |
| { |
| "epoch": 0.012987862336028238, |
| "grad_norm": 10.508868217468262, |
| "learning_rate": 1.9998202054202093e-07, |
| "loss": 1.0205, |
| "step": 14100 |
| }, |
| { |
| "epoch": 0.012997073585911945, |
| "grad_norm": 12.244237899780273, |
| "learning_rate": 1.9998199308281399e-07, |
| "loss": 1.054, |
| "step": 14110 |
| }, |
| { |
| "epoch": 0.013006284835795655, |
| "grad_norm": 12.022279739379883, |
| "learning_rate": 1.9998196560265632e-07, |
| "loss": 1.0382, |
| "step": 14120 |
| }, |
| { |
| "epoch": 0.013015496085679363, |
| "grad_norm": 10.809887886047363, |
| "learning_rate": 1.9998193810154796e-07, |
| "loss": 1.0436, |
| "step": 14130 |
| }, |
| { |
| "epoch": 0.01302470733556307, |
| "grad_norm": 10.311279296875, |
| "learning_rate": 1.9998191057948887e-07, |
| "loss": 1.0081, |
| "step": 14140 |
| }, |
| { |
| "epoch": 0.013033918585446778, |
| "grad_norm": 10.968958854675293, |
| "learning_rate": 1.999818830364791e-07, |
| "loss": 0.9886, |
| "step": 14150 |
| }, |
| { |
| "epoch": 0.013043129835330486, |
| "grad_norm": 9.940498352050781, |
| "learning_rate": 1.9998185547251865e-07, |
| "loss": 1.0222, |
| "step": 14160 |
| }, |
| { |
| "epoch": 0.013052341085214194, |
| "grad_norm": 10.495719909667969, |
| "learning_rate": 1.9998182788760752e-07, |
| "loss": 0.9766, |
| "step": 14170 |
| }, |
| { |
| "epoch": 0.013061552335097901, |
| "grad_norm": 10.64636516571045, |
| "learning_rate": 1.9998180028174572e-07, |
| "loss": 1.066, |
| "step": 14180 |
| }, |
| { |
| "epoch": 0.01307076358498161, |
| "grad_norm": 10.699214935302734, |
| "learning_rate": 1.9998177265493322e-07, |
| "loss": 1.0503, |
| "step": 14190 |
| }, |
| { |
| "epoch": 0.013079974834865317, |
| "grad_norm": 10.3624267578125, |
| "learning_rate": 1.9998174500717005e-07, |
| "loss": 0.9961, |
| "step": 14200 |
| }, |
| { |
| "epoch": 0.013089186084749026, |
| "grad_norm": 11.242952346801758, |
| "learning_rate": 1.9998171733845624e-07, |
| "loss": 1.0167, |
| "step": 14210 |
| }, |
| { |
| "epoch": 0.013098397334632734, |
| "grad_norm": 9.298620223999023, |
| "learning_rate": 1.9998168964879178e-07, |
| "loss": 1.0293, |
| "step": 14220 |
| }, |
| { |
| "epoch": 0.013107608584516442, |
| "grad_norm": 10.501712799072266, |
| "learning_rate": 1.9998166193817664e-07, |
| "loss": 1.0698, |
| "step": 14230 |
| }, |
| { |
| "epoch": 0.01311681983440015, |
| "grad_norm": 10.517026901245117, |
| "learning_rate": 1.9998163420661086e-07, |
| "loss": 1.0576, |
| "step": 14240 |
| }, |
| { |
| "epoch": 0.013126031084283857, |
| "grad_norm": 10.720295906066895, |
| "learning_rate": 1.999816064540945e-07, |
| "loss": 1.0307, |
| "step": 14250 |
| }, |
| { |
| "epoch": 0.013135242334167565, |
| "grad_norm": 10.878846168518066, |
| "learning_rate": 1.9998157868062745e-07, |
| "loss": 1.0042, |
| "step": 14260 |
| }, |
| { |
| "epoch": 0.013144453584051273, |
| "grad_norm": 14.173813819885254, |
| "learning_rate": 1.9998155088620982e-07, |
| "loss": 0.9953, |
| "step": 14270 |
| }, |
| { |
| "epoch": 0.01315366483393498, |
| "grad_norm": 10.729985237121582, |
| "learning_rate": 1.999815230708415e-07, |
| "loss": 1.0335, |
| "step": 14280 |
| }, |
| { |
| "epoch": 0.01316287608381869, |
| "grad_norm": 12.065109252929688, |
| "learning_rate": 1.999814952345226e-07, |
| "loss": 1.053, |
| "step": 14290 |
| }, |
| { |
| "epoch": 0.013172087333702398, |
| "grad_norm": 10.979692459106445, |
| "learning_rate": 1.9998146737725312e-07, |
| "loss": 1.0252, |
| "step": 14300 |
| }, |
| { |
| "epoch": 0.013181298583586106, |
| "grad_norm": 10.119261741638184, |
| "learning_rate": 1.99981439499033e-07, |
| "loss": 0.9999, |
| "step": 14310 |
| }, |
| { |
| "epoch": 0.013190509833469814, |
| "grad_norm": 11.453356742858887, |
| "learning_rate": 1.999814115998623e-07, |
| "loss": 1.0124, |
| "step": 14320 |
| }, |
| { |
| "epoch": 0.013199721083353521, |
| "grad_norm": 10.567078590393066, |
| "learning_rate": 1.99981383679741e-07, |
| "loss": 1.0754, |
| "step": 14330 |
| }, |
| { |
| "epoch": 0.013208932333237229, |
| "grad_norm": 10.189666748046875, |
| "learning_rate": 1.9998135573866915e-07, |
| "loss": 1.0126, |
| "step": 14340 |
| }, |
| { |
| "epoch": 0.013218143583120937, |
| "grad_norm": 11.933945655822754, |
| "learning_rate": 1.9998132777664667e-07, |
| "loss": 1.0469, |
| "step": 14350 |
| }, |
| { |
| "epoch": 0.013227354833004645, |
| "grad_norm": 10.099088668823242, |
| "learning_rate": 1.9998129979367365e-07, |
| "loss": 1.0056, |
| "step": 14360 |
| }, |
| { |
| "epoch": 0.013236566082888352, |
| "grad_norm": 10.710516929626465, |
| "learning_rate": 1.9998127178975004e-07, |
| "loss": 0.9989, |
| "step": 14370 |
| }, |
| { |
| "epoch": 0.013245777332772062, |
| "grad_norm": 11.486581802368164, |
| "learning_rate": 1.999812437648759e-07, |
| "loss": 1.0373, |
| "step": 14380 |
| }, |
| { |
| "epoch": 0.01325498858265577, |
| "grad_norm": 10.034515380859375, |
| "learning_rate": 1.9998121571905115e-07, |
| "loss": 0.9998, |
| "step": 14390 |
| }, |
| { |
| "epoch": 0.013264199832539477, |
| "grad_norm": 11.530479431152344, |
| "learning_rate": 1.999811876522759e-07, |
| "loss": 0.9943, |
| "step": 14400 |
| }, |
| { |
| "epoch": 0.013273411082423185, |
| "grad_norm": 12.000450134277344, |
| "learning_rate": 1.999811595645501e-07, |
| "loss": 1.0551, |
| "step": 14410 |
| }, |
| { |
| "epoch": 0.013282622332306893, |
| "grad_norm": 11.614253997802734, |
| "learning_rate": 1.9998113145587374e-07, |
| "loss": 1.0094, |
| "step": 14420 |
| }, |
| { |
| "epoch": 0.0132918335821906, |
| "grad_norm": 10.894906044006348, |
| "learning_rate": 1.9998110332624684e-07, |
| "loss": 1.0081, |
| "step": 14430 |
| }, |
| { |
| "epoch": 0.013301044832074308, |
| "grad_norm": 10.965391159057617, |
| "learning_rate": 1.9998107517566947e-07, |
| "loss": 1.0145, |
| "step": 14440 |
| }, |
| { |
| "epoch": 0.013310256081958016, |
| "grad_norm": 10.908135414123535, |
| "learning_rate": 1.9998104700414152e-07, |
| "loss": 1.0254, |
| "step": 14450 |
| }, |
| { |
| "epoch": 0.013319467331841726, |
| "grad_norm": 11.68498420715332, |
| "learning_rate": 1.9998101881166308e-07, |
| "loss": 1.0108, |
| "step": 14460 |
| }, |
| { |
| "epoch": 0.013328678581725434, |
| "grad_norm": 11.092029571533203, |
| "learning_rate": 1.9998099059823416e-07, |
| "loss": 1.0589, |
| "step": 14470 |
| }, |
| { |
| "epoch": 0.013337889831609141, |
| "grad_norm": 12.352855682373047, |
| "learning_rate": 1.999809623638547e-07, |
| "loss": 1.0074, |
| "step": 14480 |
| }, |
| { |
| "epoch": 0.013347101081492849, |
| "grad_norm": 11.044502258300781, |
| "learning_rate": 1.9998093410852474e-07, |
| "loss": 1.0132, |
| "step": 14490 |
| }, |
| { |
| "epoch": 0.013356312331376557, |
| "grad_norm": 11.899613380432129, |
| "learning_rate": 1.999809058322443e-07, |
| "loss": 1.0307, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.013365523581260265, |
| "grad_norm": 11.268917083740234, |
| "learning_rate": 1.9998087753501335e-07, |
| "loss": 1.0392, |
| "step": 14510 |
| }, |
| { |
| "epoch": 0.013374734831143972, |
| "grad_norm": 11.036169052124023, |
| "learning_rate": 1.9998084921683197e-07, |
| "loss": 1.0502, |
| "step": 14520 |
| }, |
| { |
| "epoch": 0.01338394608102768, |
| "grad_norm": 11.845355987548828, |
| "learning_rate": 1.9998082087770007e-07, |
| "loss": 1.0149, |
| "step": 14530 |
| }, |
| { |
| "epoch": 0.013393157330911388, |
| "grad_norm": 10.693422317504883, |
| "learning_rate": 1.9998079251761774e-07, |
| "loss": 1.029, |
| "step": 14540 |
| }, |
| { |
| "epoch": 0.013402368580795097, |
| "grad_norm": 11.055460929870605, |
| "learning_rate": 1.9998076413658492e-07, |
| "loss": 1.0338, |
| "step": 14550 |
| }, |
| { |
| "epoch": 0.013411579830678805, |
| "grad_norm": 10.551007270812988, |
| "learning_rate": 1.9998073573460167e-07, |
| "loss": 1.0369, |
| "step": 14560 |
| }, |
| { |
| "epoch": 0.013420791080562513, |
| "grad_norm": 9.539275169372559, |
| "learning_rate": 1.9998070731166796e-07, |
| "loss": 0.9764, |
| "step": 14570 |
| }, |
| { |
| "epoch": 0.01343000233044622, |
| "grad_norm": 9.62117862701416, |
| "learning_rate": 1.999806788677838e-07, |
| "loss": 0.9845, |
| "step": 14580 |
| }, |
| { |
| "epoch": 0.013439213580329928, |
| "grad_norm": 10.485407829284668, |
| "learning_rate": 1.9998065040294921e-07, |
| "loss": 0.9991, |
| "step": 14590 |
| }, |
| { |
| "epoch": 0.013448424830213636, |
| "grad_norm": 11.85799503326416, |
| "learning_rate": 1.9998062191716418e-07, |
| "loss": 1.0087, |
| "step": 14600 |
| }, |
| { |
| "epoch": 0.013457636080097344, |
| "grad_norm": 10.689120292663574, |
| "learning_rate": 1.9998059341042873e-07, |
| "loss": 1.0033, |
| "step": 14610 |
| }, |
| { |
| "epoch": 0.013466847329981052, |
| "grad_norm": 10.790801048278809, |
| "learning_rate": 1.9998056488274285e-07, |
| "loss": 0.988, |
| "step": 14620 |
| }, |
| { |
| "epoch": 0.013476058579864761, |
| "grad_norm": 11.873769760131836, |
| "learning_rate": 1.999805363341066e-07, |
| "loss": 1.0262, |
| "step": 14630 |
| }, |
| { |
| "epoch": 0.013485269829748469, |
| "grad_norm": 9.655999183654785, |
| "learning_rate": 1.999805077645199e-07, |
| "loss": 1.0097, |
| "step": 14640 |
| }, |
| { |
| "epoch": 0.013494481079632177, |
| "grad_norm": 11.321676254272461, |
| "learning_rate": 1.999804791739828e-07, |
| "loss": 1.0197, |
| "step": 14650 |
| }, |
| { |
| "epoch": 0.013503692329515884, |
| "grad_norm": 10.857213020324707, |
| "learning_rate": 1.999804505624953e-07, |
| "loss": 1.0194, |
| "step": 14660 |
| }, |
| { |
| "epoch": 0.013512903579399592, |
| "grad_norm": 10.409232139587402, |
| "learning_rate": 1.9998042193005744e-07, |
| "loss": 1.0039, |
| "step": 14670 |
| }, |
| { |
| "epoch": 0.0135221148292833, |
| "grad_norm": 10.471531867980957, |
| "learning_rate": 1.9998039327666917e-07, |
| "loss": 0.9858, |
| "step": 14680 |
| }, |
| { |
| "epoch": 0.013531326079167008, |
| "grad_norm": 10.706310272216797, |
| "learning_rate": 1.9998036460233055e-07, |
| "loss": 0.9922, |
| "step": 14690 |
| }, |
| { |
| "epoch": 0.013540537329050716, |
| "grad_norm": 10.247745513916016, |
| "learning_rate": 1.9998033590704152e-07, |
| "loss": 1.0335, |
| "step": 14700 |
| }, |
| { |
| "epoch": 0.013549748578934425, |
| "grad_norm": 10.638279914855957, |
| "learning_rate": 1.9998030719080216e-07, |
| "loss": 1.0425, |
| "step": 14710 |
| }, |
| { |
| "epoch": 0.013558959828818133, |
| "grad_norm": 11.47921371459961, |
| "learning_rate": 1.9998027845361243e-07, |
| "loss": 0.9817, |
| "step": 14720 |
| }, |
| { |
| "epoch": 0.01356817107870184, |
| "grad_norm": 10.082921028137207, |
| "learning_rate": 1.9998024969547233e-07, |
| "loss": 1.0294, |
| "step": 14730 |
| }, |
| { |
| "epoch": 0.013577382328585548, |
| "grad_norm": 11.140175819396973, |
| "learning_rate": 1.999802209163819e-07, |
| "loss": 1.0294, |
| "step": 14740 |
| }, |
| { |
| "epoch": 0.013586593578469256, |
| "grad_norm": 11.994719505310059, |
| "learning_rate": 1.9998019211634114e-07, |
| "loss": 0.9859, |
| "step": 14750 |
| }, |
| { |
| "epoch": 0.013595804828352964, |
| "grad_norm": 9.512284278869629, |
| "learning_rate": 1.9998016329535002e-07, |
| "loss": 1.0147, |
| "step": 14760 |
| }, |
| { |
| "epoch": 0.013605016078236672, |
| "grad_norm": 9.654474258422852, |
| "learning_rate": 1.999801344534086e-07, |
| "loss": 1.0405, |
| "step": 14770 |
| }, |
| { |
| "epoch": 0.01361422732812038, |
| "grad_norm": 10.747416496276855, |
| "learning_rate": 1.999801055905168e-07, |
| "loss": 1.0116, |
| "step": 14780 |
| }, |
| { |
| "epoch": 0.013623438578004087, |
| "grad_norm": 14.702574729919434, |
| "learning_rate": 1.9998007670667475e-07, |
| "loss": 1.029, |
| "step": 14790 |
| }, |
| { |
| "epoch": 0.013632649827887797, |
| "grad_norm": 11.341087341308594, |
| "learning_rate": 1.9998004780188234e-07, |
| "loss": 1.0071, |
| "step": 14800 |
| }, |
| { |
| "epoch": 0.013641861077771504, |
| "grad_norm": 11.673993110656738, |
| "learning_rate": 1.9998001887613963e-07, |
| "loss": 1.0189, |
| "step": 14810 |
| }, |
| { |
| "epoch": 0.013651072327655212, |
| "grad_norm": 10.635948181152344, |
| "learning_rate": 1.9997998992944661e-07, |
| "loss": 1.0177, |
| "step": 14820 |
| }, |
| { |
| "epoch": 0.01366028357753892, |
| "grad_norm": 11.475762367248535, |
| "learning_rate": 1.9997996096180335e-07, |
| "loss": 1.0327, |
| "step": 14830 |
| }, |
| { |
| "epoch": 0.013669494827422628, |
| "grad_norm": 10.284483909606934, |
| "learning_rate": 1.9997993197320976e-07, |
| "loss": 0.9777, |
| "step": 14840 |
| }, |
| { |
| "epoch": 0.013678706077306335, |
| "grad_norm": 12.186320304870605, |
| "learning_rate": 1.9997990296366593e-07, |
| "loss": 0.9725, |
| "step": 14850 |
| }, |
| { |
| "epoch": 0.013687917327190043, |
| "grad_norm": 10.065929412841797, |
| "learning_rate": 1.9997987393317176e-07, |
| "loss": 1.0289, |
| "step": 14860 |
| }, |
| { |
| "epoch": 0.013697128577073751, |
| "grad_norm": 10.997315406799316, |
| "learning_rate": 1.9997984488172738e-07, |
| "loss": 1.0715, |
| "step": 14870 |
| }, |
| { |
| "epoch": 0.01370633982695746, |
| "grad_norm": 11.145686149597168, |
| "learning_rate": 1.999798158093327e-07, |
| "loss": 1.0205, |
| "step": 14880 |
| }, |
| { |
| "epoch": 0.013715551076841168, |
| "grad_norm": 11.063886642456055, |
| "learning_rate": 1.9997978671598778e-07, |
| "loss": 1.0148, |
| "step": 14890 |
| }, |
| { |
| "epoch": 0.013724762326724876, |
| "grad_norm": 11.686251640319824, |
| "learning_rate": 1.9997975760169262e-07, |
| "loss": 1.0015, |
| "step": 14900 |
| }, |
| { |
| "epoch": 0.013733973576608584, |
| "grad_norm": 10.730671882629395, |
| "learning_rate": 1.999797284664472e-07, |
| "loss": 1.0081, |
| "step": 14910 |
| }, |
| { |
| "epoch": 0.013743184826492292, |
| "grad_norm": 10.751588821411133, |
| "learning_rate": 1.9997969931025153e-07, |
| "loss": 1.0329, |
| "step": 14920 |
| }, |
| { |
| "epoch": 0.013752396076376, |
| "grad_norm": 9.93537712097168, |
| "learning_rate": 1.9997967013310566e-07, |
| "loss": 1.007, |
| "step": 14930 |
| }, |
| { |
| "epoch": 0.013761607326259707, |
| "grad_norm": 10.01486587524414, |
| "learning_rate": 1.9997964093500953e-07, |
| "loss": 1.0286, |
| "step": 14940 |
| }, |
| { |
| "epoch": 0.013770818576143415, |
| "grad_norm": 11.721806526184082, |
| "learning_rate": 1.9997961171596321e-07, |
| "loss": 0.9858, |
| "step": 14950 |
| }, |
| { |
| "epoch": 0.013780029826027123, |
| "grad_norm": 11.199084281921387, |
| "learning_rate": 1.9997958247596665e-07, |
| "loss": 1.0249, |
| "step": 14960 |
| }, |
| { |
| "epoch": 0.013789241075910832, |
| "grad_norm": 10.438685417175293, |
| "learning_rate": 1.999795532150199e-07, |
| "loss": 1.0216, |
| "step": 14970 |
| }, |
| { |
| "epoch": 0.01379845232579454, |
| "grad_norm": 11.354863166809082, |
| "learning_rate": 1.9997952393312295e-07, |
| "loss": 1.0222, |
| "step": 14980 |
| }, |
| { |
| "epoch": 0.013807663575678248, |
| "grad_norm": 12.6505765914917, |
| "learning_rate": 1.999794946302758e-07, |
| "loss": 1.0157, |
| "step": 14990 |
| }, |
| { |
| "epoch": 0.013816874825561955, |
| "grad_norm": 10.83745288848877, |
| "learning_rate": 1.9997946530647845e-07, |
| "loss": 0.9961, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.013826086075445663, |
| "grad_norm": 10.989898681640625, |
| "learning_rate": 1.9997943596173093e-07, |
| "loss": 0.9805, |
| "step": 15010 |
| }, |
| { |
| "epoch": 0.013835297325329371, |
| "grad_norm": 11.012017250061035, |
| "learning_rate": 1.9997940659603322e-07, |
| "loss": 1.0345, |
| "step": 15020 |
| }, |
| { |
| "epoch": 0.013844508575213079, |
| "grad_norm": 10.916410446166992, |
| "learning_rate": 1.9997937720938536e-07, |
| "loss": 1.0092, |
| "step": 15030 |
| }, |
| { |
| "epoch": 0.013853719825096786, |
| "grad_norm": 10.169594764709473, |
| "learning_rate": 1.9997934780178731e-07, |
| "loss": 1.0161, |
| "step": 15040 |
| }, |
| { |
| "epoch": 0.013862931074980496, |
| "grad_norm": 11.434839248657227, |
| "learning_rate": 1.9997931837323915e-07, |
| "loss": 1.0381, |
| "step": 15050 |
| }, |
| { |
| "epoch": 0.013872142324864204, |
| "grad_norm": 9.908843994140625, |
| "learning_rate": 1.9997928892374079e-07, |
| "loss": 1.0374, |
| "step": 15060 |
| }, |
| { |
| "epoch": 0.013881353574747912, |
| "grad_norm": 10.155745506286621, |
| "learning_rate": 1.999792594532923e-07, |
| "loss": 0.9662, |
| "step": 15070 |
| }, |
| { |
| "epoch": 0.01389056482463162, |
| "grad_norm": 10.907580375671387, |
| "learning_rate": 1.9997922996189364e-07, |
| "loss": 1.0077, |
| "step": 15080 |
| }, |
| { |
| "epoch": 0.013899776074515327, |
| "grad_norm": 10.50309944152832, |
| "learning_rate": 1.999792004495449e-07, |
| "loss": 1.0019, |
| "step": 15090 |
| }, |
| { |
| "epoch": 0.013908987324399035, |
| "grad_norm": 12.115172386169434, |
| "learning_rate": 1.99979170916246e-07, |
| "loss": 1.0376, |
| "step": 15100 |
| }, |
| { |
| "epoch": 0.013918198574282743, |
| "grad_norm": 10.391730308532715, |
| "learning_rate": 1.9997914136199698e-07, |
| "loss": 1.0375, |
| "step": 15110 |
| }, |
| { |
| "epoch": 0.01392740982416645, |
| "grad_norm": 12.150830268859863, |
| "learning_rate": 1.9997911178679784e-07, |
| "loss": 1.0154, |
| "step": 15120 |
| }, |
| { |
| "epoch": 0.013936621074050158, |
| "grad_norm": 11.37088394165039, |
| "learning_rate": 1.9997908219064861e-07, |
| "loss": 0.9937, |
| "step": 15130 |
| }, |
| { |
| "epoch": 0.013945832323933868, |
| "grad_norm": 9.907126426696777, |
| "learning_rate": 1.9997905257354927e-07, |
| "loss": 1.0387, |
| "step": 15140 |
| }, |
| { |
| "epoch": 0.013955043573817575, |
| "grad_norm": 10.536436080932617, |
| "learning_rate": 1.9997902293549984e-07, |
| "loss": 1.0499, |
| "step": 15150 |
| }, |
| { |
| "epoch": 0.013964254823701283, |
| "grad_norm": 12.115398406982422, |
| "learning_rate": 1.999789932765003e-07, |
| "loss": 1.021, |
| "step": 15160 |
| }, |
| { |
| "epoch": 0.013973466073584991, |
| "grad_norm": 12.181248664855957, |
| "learning_rate": 1.9997896359655066e-07, |
| "loss": 1.0847, |
| "step": 15170 |
| }, |
| { |
| "epoch": 0.013982677323468699, |
| "grad_norm": 11.088659286499023, |
| "learning_rate": 1.9997893389565099e-07, |
| "loss": 1.0782, |
| "step": 15180 |
| }, |
| { |
| "epoch": 0.013991888573352406, |
| "grad_norm": 25.22232437133789, |
| "learning_rate": 1.9997890417380122e-07, |
| "loss": 0.9743, |
| "step": 15190 |
| }, |
| { |
| "epoch": 0.014001099823236114, |
| "grad_norm": 10.395514488220215, |
| "learning_rate": 1.999788744310014e-07, |
| "loss": 1.0277, |
| "step": 15200 |
| }, |
| { |
| "epoch": 0.014010311073119822, |
| "grad_norm": 14.201665878295898, |
| "learning_rate": 1.999788446672515e-07, |
| "loss": 1.0061, |
| "step": 15210 |
| }, |
| { |
| "epoch": 0.014019522323003531, |
| "grad_norm": 12.390545845031738, |
| "learning_rate": 1.9997881488255156e-07, |
| "loss": 1.0417, |
| "step": 15220 |
| }, |
| { |
| "epoch": 0.01402873357288724, |
| "grad_norm": 12.292132377624512, |
| "learning_rate": 1.9997878507690155e-07, |
| "loss": 1.0588, |
| "step": 15230 |
| }, |
| { |
| "epoch": 0.014037944822770947, |
| "grad_norm": 10.636080741882324, |
| "learning_rate": 1.9997875525030155e-07, |
| "loss": 1.0434, |
| "step": 15240 |
| }, |
| { |
| "epoch": 0.014047156072654655, |
| "grad_norm": 10.713860511779785, |
| "learning_rate": 1.999787254027515e-07, |
| "loss": 1.0395, |
| "step": 15250 |
| }, |
| { |
| "epoch": 0.014056367322538363, |
| "grad_norm": 10.314606666564941, |
| "learning_rate": 1.9997869553425137e-07, |
| "loss": 1.0413, |
| "step": 15260 |
| }, |
| { |
| "epoch": 0.01406557857242207, |
| "grad_norm": 10.961061477661133, |
| "learning_rate": 1.9997866564480126e-07, |
| "loss": 1.0657, |
| "step": 15270 |
| }, |
| { |
| "epoch": 0.014074789822305778, |
| "grad_norm": 10.971481323242188, |
| "learning_rate": 1.9997863573440114e-07, |
| "loss": 1.017, |
| "step": 15280 |
| }, |
| { |
| "epoch": 0.014084001072189486, |
| "grad_norm": 10.642735481262207, |
| "learning_rate": 1.99978605803051e-07, |
| "loss": 1.0234, |
| "step": 15290 |
| }, |
| { |
| "epoch": 0.014093212322073195, |
| "grad_norm": 9.57653522491455, |
| "learning_rate": 1.9997857585075085e-07, |
| "loss": 1.0137, |
| "step": 15300 |
| }, |
| { |
| "epoch": 0.014102423571956903, |
| "grad_norm": 10.176536560058594, |
| "learning_rate": 1.9997854587750073e-07, |
| "loss": 1.0048, |
| "step": 15310 |
| }, |
| { |
| "epoch": 0.01411163482184061, |
| "grad_norm": 10.436186790466309, |
| "learning_rate": 1.999785158833006e-07, |
| "loss": 1.0164, |
| "step": 15320 |
| }, |
| { |
| "epoch": 0.014120846071724319, |
| "grad_norm": 11.403667449951172, |
| "learning_rate": 1.999784858681505e-07, |
| "loss": 0.9884, |
| "step": 15330 |
| }, |
| { |
| "epoch": 0.014130057321608026, |
| "grad_norm": 11.524589538574219, |
| "learning_rate": 1.999784558320504e-07, |
| "loss": 1.032, |
| "step": 15340 |
| }, |
| { |
| "epoch": 0.014139268571491734, |
| "grad_norm": 13.009188652038574, |
| "learning_rate": 1.9997842577500034e-07, |
| "loss": 1.0384, |
| "step": 15350 |
| }, |
| { |
| "epoch": 0.014148479821375442, |
| "grad_norm": 11.528955459594727, |
| "learning_rate": 1.999783956970003e-07, |
| "loss": 1.0337, |
| "step": 15360 |
| }, |
| { |
| "epoch": 0.01415769107125915, |
| "grad_norm": 9.382002830505371, |
| "learning_rate": 1.999783655980503e-07, |
| "loss": 1.0415, |
| "step": 15370 |
| }, |
| { |
| "epoch": 0.014166902321142857, |
| "grad_norm": 9.43379020690918, |
| "learning_rate": 1.9997833547815038e-07, |
| "loss": 1.0669, |
| "step": 15380 |
| }, |
| { |
| "epoch": 0.014176113571026567, |
| "grad_norm": 13.102933883666992, |
| "learning_rate": 1.999783053373005e-07, |
| "loss": 1.0378, |
| "step": 15390 |
| }, |
| { |
| "epoch": 0.014185324820910275, |
| "grad_norm": 10.999958992004395, |
| "learning_rate": 1.9997827517550066e-07, |
| "loss": 1.0557, |
| "step": 15400 |
| }, |
| { |
| "epoch": 0.014194536070793982, |
| "grad_norm": 10.32621955871582, |
| "learning_rate": 1.9997824499275091e-07, |
| "loss": 0.997, |
| "step": 15410 |
| }, |
| { |
| "epoch": 0.01420374732067769, |
| "grad_norm": 9.563864707946777, |
| "learning_rate": 1.9997821478905121e-07, |
| "loss": 1.0619, |
| "step": 15420 |
| }, |
| { |
| "epoch": 0.014212958570561398, |
| "grad_norm": 23.16864585876465, |
| "learning_rate": 1.999781845644016e-07, |
| "loss": 1.0612, |
| "step": 15430 |
| }, |
| { |
| "epoch": 0.014222169820445106, |
| "grad_norm": 13.929885864257812, |
| "learning_rate": 1.9997815431880207e-07, |
| "loss": 1.009, |
| "step": 15440 |
| }, |
| { |
| "epoch": 0.014231381070328813, |
| "grad_norm": 10.578057289123535, |
| "learning_rate": 1.9997812405225266e-07, |
| "loss": 1.049, |
| "step": 15450 |
| }, |
| { |
| "epoch": 0.014240592320212521, |
| "grad_norm": 9.861318588256836, |
| "learning_rate": 1.9997809376475332e-07, |
| "loss": 1.0201, |
| "step": 15460 |
| }, |
| { |
| "epoch": 0.01424980357009623, |
| "grad_norm": 11.266529083251953, |
| "learning_rate": 1.9997806345630407e-07, |
| "loss": 1.0081, |
| "step": 15470 |
| }, |
| { |
| "epoch": 0.014259014819979939, |
| "grad_norm": 8.847119331359863, |
| "learning_rate": 1.9997803312690497e-07, |
| "loss": 0.9136, |
| "step": 15480 |
| }, |
| { |
| "epoch": 0.014268226069863646, |
| "grad_norm": 10.075629234313965, |
| "learning_rate": 1.9997800277655597e-07, |
| "loss": 1.0464, |
| "step": 15490 |
| }, |
| { |
| "epoch": 0.014277437319747354, |
| "grad_norm": 11.116888999938965, |
| "learning_rate": 1.999779724052571e-07, |
| "loss": 1.0437, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.014286648569631062, |
| "grad_norm": 10.67253303527832, |
| "learning_rate": 1.9997794201300833e-07, |
| "loss": 1.0199, |
| "step": 15510 |
| }, |
| { |
| "epoch": 0.01429585981951477, |
| "grad_norm": 9.810186386108398, |
| "learning_rate": 1.9997791159980972e-07, |
| "loss": 0.992, |
| "step": 15520 |
| }, |
| { |
| "epoch": 0.014305071069398477, |
| "grad_norm": 11.050532341003418, |
| "learning_rate": 1.9997788116566126e-07, |
| "loss": 1.0523, |
| "step": 15530 |
| }, |
| { |
| "epoch": 0.014314282319282185, |
| "grad_norm": 10.395670890808105, |
| "learning_rate": 1.9997785071056295e-07, |
| "loss": 0.9642, |
| "step": 15540 |
| }, |
| { |
| "epoch": 0.014323493569165893, |
| "grad_norm": 11.32052230834961, |
| "learning_rate": 1.9997782023451478e-07, |
| "loss": 1.0184, |
| "step": 15550 |
| }, |
| { |
| "epoch": 0.014332704819049602, |
| "grad_norm": 11.003552436828613, |
| "learning_rate": 1.9997778973751677e-07, |
| "loss": 1.0208, |
| "step": 15560 |
| }, |
| { |
| "epoch": 0.01434191606893331, |
| "grad_norm": 11.940206527709961, |
| "learning_rate": 1.9997775921956893e-07, |
| "loss": 1.0038, |
| "step": 15570 |
| }, |
| { |
| "epoch": 0.014351127318817018, |
| "grad_norm": 12.526101112365723, |
| "learning_rate": 1.9997772868067127e-07, |
| "loss": 1.0312, |
| "step": 15580 |
| }, |
| { |
| "epoch": 0.014360338568700726, |
| "grad_norm": 10.388029098510742, |
| "learning_rate": 1.9997769812082378e-07, |
| "loss": 0.9929, |
| "step": 15590 |
| }, |
| { |
| "epoch": 0.014369549818584433, |
| "grad_norm": 11.88277816772461, |
| "learning_rate": 1.9997766754002647e-07, |
| "loss": 1.0499, |
| "step": 15600 |
| }, |
| { |
| "epoch": 0.014378761068468141, |
| "grad_norm": 14.356674194335938, |
| "learning_rate": 1.9997763693827938e-07, |
| "loss": 1.0346, |
| "step": 15610 |
| }, |
| { |
| "epoch": 0.014387972318351849, |
| "grad_norm": 12.173201560974121, |
| "learning_rate": 1.9997760631558247e-07, |
| "loss": 0.9987, |
| "step": 15620 |
| }, |
| { |
| "epoch": 0.014397183568235557, |
| "grad_norm": 11.898418426513672, |
| "learning_rate": 1.999775756719358e-07, |
| "loss": 0.9973, |
| "step": 15630 |
| }, |
| { |
| "epoch": 0.014406394818119266, |
| "grad_norm": 11.622894287109375, |
| "learning_rate": 1.999775450073393e-07, |
| "loss": 1.0065, |
| "step": 15640 |
| }, |
| { |
| "epoch": 0.014415606068002974, |
| "grad_norm": 11.362640380859375, |
| "learning_rate": 1.9997751432179303e-07, |
| "loss": 0.9882, |
| "step": 15650 |
| }, |
| { |
| "epoch": 0.014424817317886682, |
| "grad_norm": 11.543924331665039, |
| "learning_rate": 1.99977483615297e-07, |
| "loss": 1.005, |
| "step": 15660 |
| }, |
| { |
| "epoch": 0.01443402856777039, |
| "grad_norm": 10.448874473571777, |
| "learning_rate": 1.999774528878512e-07, |
| "loss": 1.0093, |
| "step": 15670 |
| }, |
| { |
| "epoch": 0.014443239817654097, |
| "grad_norm": 11.865896224975586, |
| "learning_rate": 1.9997742213945562e-07, |
| "loss": 1.0215, |
| "step": 15680 |
| }, |
| { |
| "epoch": 0.014452451067537805, |
| "grad_norm": 10.817159652709961, |
| "learning_rate": 1.999773913701103e-07, |
| "loss": 1.0035, |
| "step": 15690 |
| }, |
| { |
| "epoch": 0.014461662317421513, |
| "grad_norm": 11.522754669189453, |
| "learning_rate": 1.9997736057981525e-07, |
| "loss": 1.07, |
| "step": 15700 |
| }, |
| { |
| "epoch": 0.01447087356730522, |
| "grad_norm": 10.102328300476074, |
| "learning_rate": 1.9997732976857043e-07, |
| "loss": 0.9871, |
| "step": 15710 |
| }, |
| { |
| "epoch": 0.014480084817188928, |
| "grad_norm": 10.84836483001709, |
| "learning_rate": 1.999772989363759e-07, |
| "loss": 0.9791, |
| "step": 15720 |
| }, |
| { |
| "epoch": 0.014489296067072638, |
| "grad_norm": 10.973687171936035, |
| "learning_rate": 1.999772680832316e-07, |
| "loss": 1.0278, |
| "step": 15730 |
| }, |
| { |
| "epoch": 0.014498507316956346, |
| "grad_norm": 10.168926239013672, |
| "learning_rate": 1.9997723720913763e-07, |
| "loss": 1.0241, |
| "step": 15740 |
| }, |
| { |
| "epoch": 0.014507718566840053, |
| "grad_norm": 9.546493530273438, |
| "learning_rate": 1.999772063140939e-07, |
| "loss": 1.0236, |
| "step": 15750 |
| }, |
| { |
| "epoch": 0.014516929816723761, |
| "grad_norm": 12.213617324829102, |
| "learning_rate": 1.9997717539810047e-07, |
| "loss": 0.9798, |
| "step": 15760 |
| }, |
| { |
| "epoch": 0.014526141066607469, |
| "grad_norm": 10.88346004486084, |
| "learning_rate": 1.9997714446115733e-07, |
| "loss": 1.0406, |
| "step": 15770 |
| }, |
| { |
| "epoch": 0.014535352316491177, |
| "grad_norm": 10.08320140838623, |
| "learning_rate": 1.9997711350326452e-07, |
| "loss": 1.0344, |
| "step": 15780 |
| }, |
| { |
| "epoch": 0.014544563566374884, |
| "grad_norm": 18.26591682434082, |
| "learning_rate": 1.99977082524422e-07, |
| "loss": 0.9717, |
| "step": 15790 |
| }, |
| { |
| "epoch": 0.014553774816258592, |
| "grad_norm": 10.489923477172852, |
| "learning_rate": 1.999770515246298e-07, |
| "loss": 1.0087, |
| "step": 15800 |
| }, |
| { |
| "epoch": 0.014562986066142302, |
| "grad_norm": 11.607556343078613, |
| "learning_rate": 1.9997702050388795e-07, |
| "loss": 1.0183, |
| "step": 15810 |
| }, |
| { |
| "epoch": 0.01457219731602601, |
| "grad_norm": 10.539502143859863, |
| "learning_rate": 1.9997698946219637e-07, |
| "loss": 1.0323, |
| "step": 15820 |
| }, |
| { |
| "epoch": 0.014581408565909717, |
| "grad_norm": 10.956584930419922, |
| "learning_rate": 1.9997695839955515e-07, |
| "loss": 1.0222, |
| "step": 15830 |
| }, |
| { |
| "epoch": 0.014590619815793425, |
| "grad_norm": 11.396410942077637, |
| "learning_rate": 1.999769273159643e-07, |
| "loss": 1.0297, |
| "step": 15840 |
| }, |
| { |
| "epoch": 0.014599831065677133, |
| "grad_norm": 10.180088996887207, |
| "learning_rate": 1.9997689621142378e-07, |
| "loss": 1.0199, |
| "step": 15850 |
| }, |
| { |
| "epoch": 0.01460904231556084, |
| "grad_norm": 11.914243698120117, |
| "learning_rate": 1.9997686508593361e-07, |
| "loss": 1.0468, |
| "step": 15860 |
| }, |
| { |
| "epoch": 0.014618253565444548, |
| "grad_norm": 10.573568344116211, |
| "learning_rate": 1.9997683393949381e-07, |
| "loss": 1.0195, |
| "step": 15870 |
| }, |
| { |
| "epoch": 0.014627464815328256, |
| "grad_norm": 9.433327674865723, |
| "learning_rate": 1.9997680277210435e-07, |
| "loss": 0.9811, |
| "step": 15880 |
| }, |
| { |
| "epoch": 0.014636676065211966, |
| "grad_norm": 11.371285438537598, |
| "learning_rate": 1.9997677158376532e-07, |
| "loss": 1.0538, |
| "step": 15890 |
| }, |
| { |
| "epoch": 0.014645887315095673, |
| "grad_norm": 11.240488052368164, |
| "learning_rate": 1.9997674037447663e-07, |
| "loss": 1.019, |
| "step": 15900 |
| }, |
| { |
| "epoch": 0.014655098564979381, |
| "grad_norm": 12.941450119018555, |
| "learning_rate": 1.9997670914423835e-07, |
| "loss": 1.0086, |
| "step": 15910 |
| }, |
| { |
| "epoch": 0.014664309814863089, |
| "grad_norm": 10.488619804382324, |
| "learning_rate": 1.9997667789305043e-07, |
| "loss": 1.0304, |
| "step": 15920 |
| }, |
| { |
| "epoch": 0.014673521064746797, |
| "grad_norm": 10.6552734375, |
| "learning_rate": 1.9997664662091295e-07, |
| "loss": 0.9878, |
| "step": 15930 |
| }, |
| { |
| "epoch": 0.014682732314630504, |
| "grad_norm": 11.282824516296387, |
| "learning_rate": 1.9997661532782586e-07, |
| "loss": 0.9951, |
| "step": 15940 |
| }, |
| { |
| "epoch": 0.014691943564514212, |
| "grad_norm": 10.387130737304688, |
| "learning_rate": 1.9997658401378918e-07, |
| "loss": 1.003, |
| "step": 15950 |
| }, |
| { |
| "epoch": 0.01470115481439792, |
| "grad_norm": 10.162776947021484, |
| "learning_rate": 1.9997655267880292e-07, |
| "loss": 1.0115, |
| "step": 15960 |
| }, |
| { |
| "epoch": 0.014710366064281628, |
| "grad_norm": 10.97257137298584, |
| "learning_rate": 1.999765213228671e-07, |
| "loss": 1.0138, |
| "step": 15970 |
| }, |
| { |
| "epoch": 0.014719577314165337, |
| "grad_norm": 11.34595775604248, |
| "learning_rate": 1.999764899459817e-07, |
| "loss": 1.0411, |
| "step": 15980 |
| }, |
| { |
| "epoch": 0.014728788564049045, |
| "grad_norm": 10.533472061157227, |
| "learning_rate": 1.9997645854814674e-07, |
| "loss": 1.0437, |
| "step": 15990 |
| }, |
| { |
| "epoch": 0.014737999813932753, |
| "grad_norm": 10.764908790588379, |
| "learning_rate": 1.9997642712936224e-07, |
| "loss": 1.0559, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.01474721106381646, |
| "grad_norm": 11.238125801086426, |
| "learning_rate": 1.9997639568962817e-07, |
| "loss": 1.0403, |
| "step": 16010 |
| }, |
| { |
| "epoch": 0.014756422313700168, |
| "grad_norm": 11.81557559967041, |
| "learning_rate": 1.9997636422894463e-07, |
| "loss": 0.9877, |
| "step": 16020 |
| }, |
| { |
| "epoch": 0.014765633563583876, |
| "grad_norm": 11.288442611694336, |
| "learning_rate": 1.9997633274731147e-07, |
| "loss": 1.0644, |
| "step": 16030 |
| }, |
| { |
| "epoch": 0.014774844813467584, |
| "grad_norm": 11.046507835388184, |
| "learning_rate": 1.999763012447288e-07, |
| "loss": 1.0762, |
| "step": 16040 |
| }, |
| { |
| "epoch": 0.014784056063351292, |
| "grad_norm": 11.0950345993042, |
| "learning_rate": 1.9997626972119663e-07, |
| "loss": 0.9895, |
| "step": 16050 |
| }, |
| { |
| "epoch": 0.014793267313235001, |
| "grad_norm": 11.63463306427002, |
| "learning_rate": 1.9997623817671496e-07, |
| "loss": 1.0561, |
| "step": 16060 |
| }, |
| { |
| "epoch": 0.014802478563118709, |
| "grad_norm": 15.151458740234375, |
| "learning_rate": 1.9997620661128377e-07, |
| "loss": 1.0286, |
| "step": 16070 |
| }, |
| { |
| "epoch": 0.014811689813002417, |
| "grad_norm": 11.317404747009277, |
| "learning_rate": 1.9997617502490308e-07, |
| "loss": 0.9999, |
| "step": 16080 |
| }, |
| { |
| "epoch": 0.014820901062886124, |
| "grad_norm": 10.923965454101562, |
| "learning_rate": 1.999761434175729e-07, |
| "loss": 1.0377, |
| "step": 16090 |
| }, |
| { |
| "epoch": 0.014830112312769832, |
| "grad_norm": 10.43481731414795, |
| "learning_rate": 1.9997611178929322e-07, |
| "loss": 1.0124, |
| "step": 16100 |
| }, |
| { |
| "epoch": 0.01483932356265354, |
| "grad_norm": 10.214775085449219, |
| "learning_rate": 1.9997608014006406e-07, |
| "loss": 1.0054, |
| "step": 16110 |
| }, |
| { |
| "epoch": 0.014848534812537248, |
| "grad_norm": 10.96606731414795, |
| "learning_rate": 1.9997604846988542e-07, |
| "loss": 1.1042, |
| "step": 16120 |
| }, |
| { |
| "epoch": 0.014857746062420955, |
| "grad_norm": 11.680390357971191, |
| "learning_rate": 1.9997601677875732e-07, |
| "loss": 0.9866, |
| "step": 16130 |
| }, |
| { |
| "epoch": 0.014866957312304663, |
| "grad_norm": 9.644798278808594, |
| "learning_rate": 1.999759850666798e-07, |
| "loss": 1.0183, |
| "step": 16140 |
| }, |
| { |
| "epoch": 0.014876168562188373, |
| "grad_norm": 9.996898651123047, |
| "learning_rate": 1.9997595333365277e-07, |
| "loss": 1.0179, |
| "step": 16150 |
| }, |
| { |
| "epoch": 0.01488537981207208, |
| "grad_norm": 13.37299633026123, |
| "learning_rate": 1.999759215796763e-07, |
| "loss": 1.0088, |
| "step": 16160 |
| }, |
| { |
| "epoch": 0.014894591061955788, |
| "grad_norm": 10.963496208190918, |
| "learning_rate": 1.9997588980475042e-07, |
| "loss": 1.029, |
| "step": 16170 |
| }, |
| { |
| "epoch": 0.014903802311839496, |
| "grad_norm": 12.150546073913574, |
| "learning_rate": 1.9997585800887508e-07, |
| "loss": 1.0221, |
| "step": 16180 |
| }, |
| { |
| "epoch": 0.014913013561723204, |
| "grad_norm": 11.504340171813965, |
| "learning_rate": 1.9997582619205034e-07, |
| "loss": 1.0088, |
| "step": 16190 |
| }, |
| { |
| "epoch": 0.014922224811606911, |
| "grad_norm": 12.829537391662598, |
| "learning_rate": 1.9997579435427615e-07, |
| "loss": 1.0217, |
| "step": 16200 |
| }, |
| { |
| "epoch": 0.01493143606149062, |
| "grad_norm": 10.13871955871582, |
| "learning_rate": 1.9997576249555258e-07, |
| "loss": 0.9937, |
| "step": 16210 |
| }, |
| { |
| "epoch": 0.014940647311374327, |
| "grad_norm": 10.586207389831543, |
| "learning_rate": 1.9997573061587955e-07, |
| "loss": 1.0372, |
| "step": 16220 |
| }, |
| { |
| "epoch": 0.014949858561258036, |
| "grad_norm": 10.998656272888184, |
| "learning_rate": 1.9997569871525715e-07, |
| "loss": 0.9843, |
| "step": 16230 |
| }, |
| { |
| "epoch": 0.014959069811141744, |
| "grad_norm": 9.929503440856934, |
| "learning_rate": 1.9997566679368535e-07, |
| "loss": 0.9964, |
| "step": 16240 |
| }, |
| { |
| "epoch": 0.014968281061025452, |
| "grad_norm": 12.224425315856934, |
| "learning_rate": 1.9997563485116418e-07, |
| "loss": 1.0141, |
| "step": 16250 |
| }, |
| { |
| "epoch": 0.01497749231090916, |
| "grad_norm": 14.478561401367188, |
| "learning_rate": 1.9997560288769363e-07, |
| "loss": 1.0116, |
| "step": 16260 |
| }, |
| { |
| "epoch": 0.014986703560792868, |
| "grad_norm": 11.508176803588867, |
| "learning_rate": 1.9997557090327368e-07, |
| "loss": 0.9814, |
| "step": 16270 |
| }, |
| { |
| "epoch": 0.014995914810676575, |
| "grad_norm": 10.49217414855957, |
| "learning_rate": 1.9997553889790438e-07, |
| "loss": 1.0044, |
| "step": 16280 |
| }, |
| { |
| "epoch": 0.015005126060560283, |
| "grad_norm": 15.771960258483887, |
| "learning_rate": 1.999755068715857e-07, |
| "loss": 1.0449, |
| "step": 16290 |
| }, |
| { |
| "epoch": 0.01501433731044399, |
| "grad_norm": 10.940170288085938, |
| "learning_rate": 1.9997547482431768e-07, |
| "loss": 0.9913, |
| "step": 16300 |
| }, |
| { |
| "epoch": 0.015023548560327699, |
| "grad_norm": 10.522229194641113, |
| "learning_rate": 1.999754427561003e-07, |
| "loss": 1.0081, |
| "step": 16310 |
| }, |
| { |
| "epoch": 0.015032759810211408, |
| "grad_norm": 9.865076065063477, |
| "learning_rate": 1.9997541066693362e-07, |
| "loss": 0.9688, |
| "step": 16320 |
| }, |
| { |
| "epoch": 0.015041971060095116, |
| "grad_norm": 11.216987609863281, |
| "learning_rate": 1.9997537855681758e-07, |
| "loss": 1.0579, |
| "step": 16330 |
| }, |
| { |
| "epoch": 0.015051182309978824, |
| "grad_norm": 11.8953275680542, |
| "learning_rate": 1.9997534642575222e-07, |
| "loss": 1.0426, |
| "step": 16340 |
| }, |
| { |
| "epoch": 0.015060393559862531, |
| "grad_norm": 10.155219078063965, |
| "learning_rate": 1.9997531427373753e-07, |
| "loss": 1.0023, |
| "step": 16350 |
| }, |
| { |
| "epoch": 0.01506960480974624, |
| "grad_norm": 10.468849182128906, |
| "learning_rate": 1.9997528210077355e-07, |
| "loss": 0.9388, |
| "step": 16360 |
| }, |
| { |
| "epoch": 0.015078816059629947, |
| "grad_norm": 10.223745346069336, |
| "learning_rate": 1.9997524990686023e-07, |
| "loss": 0.995, |
| "step": 16370 |
| }, |
| { |
| "epoch": 0.015088027309513655, |
| "grad_norm": 11.93093204498291, |
| "learning_rate": 1.9997521769199763e-07, |
| "loss": 0.9931, |
| "step": 16380 |
| }, |
| { |
| "epoch": 0.015097238559397362, |
| "grad_norm": 9.38044261932373, |
| "learning_rate": 1.999751854561857e-07, |
| "loss": 1.0329, |
| "step": 16390 |
| }, |
| { |
| "epoch": 0.015106449809281072, |
| "grad_norm": 10.644182205200195, |
| "learning_rate": 1.9997515319942453e-07, |
| "loss": 1.0195, |
| "step": 16400 |
| }, |
| { |
| "epoch": 0.01511566105916478, |
| "grad_norm": 10.533086776733398, |
| "learning_rate": 1.9997512092171407e-07, |
| "loss": 0.9872, |
| "step": 16410 |
| }, |
| { |
| "epoch": 0.015124872309048487, |
| "grad_norm": 9.349508285522461, |
| "learning_rate": 1.9997508862305435e-07, |
| "loss": 0.9898, |
| "step": 16420 |
| }, |
| { |
| "epoch": 0.015134083558932195, |
| "grad_norm": 10.438314437866211, |
| "learning_rate": 1.9997505630344533e-07, |
| "loss": 1.0031, |
| "step": 16430 |
| }, |
| { |
| "epoch": 0.015143294808815903, |
| "grad_norm": 11.590189933776855, |
| "learning_rate": 1.999750239628871e-07, |
| "loss": 1.0238, |
| "step": 16440 |
| }, |
| { |
| "epoch": 0.01515250605869961, |
| "grad_norm": 10.662530899047852, |
| "learning_rate": 1.9997499160137958e-07, |
| "loss": 0.9699, |
| "step": 16450 |
| }, |
| { |
| "epoch": 0.015161717308583319, |
| "grad_norm": 12.296950340270996, |
| "learning_rate": 1.999749592189228e-07, |
| "loss": 1.053, |
| "step": 16460 |
| }, |
| { |
| "epoch": 0.015170928558467026, |
| "grad_norm": 10.7608642578125, |
| "learning_rate": 1.9997492681551682e-07, |
| "loss": 1.0611, |
| "step": 16470 |
| }, |
| { |
| "epoch": 0.015180139808350736, |
| "grad_norm": 10.618109703063965, |
| "learning_rate": 1.9997489439116156e-07, |
| "loss": 0.9994, |
| "step": 16480 |
| }, |
| { |
| "epoch": 0.015189351058234444, |
| "grad_norm": 9.583956718444824, |
| "learning_rate": 1.9997486194585714e-07, |
| "loss": 0.9805, |
| "step": 16490 |
| }, |
| { |
| "epoch": 0.015198562308118151, |
| "grad_norm": 10.786053657531738, |
| "learning_rate": 1.9997482947960345e-07, |
| "loss": 1.0124, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.015207773558001859, |
| "grad_norm": 10.390673637390137, |
| "learning_rate": 1.9997479699240057e-07, |
| "loss": 0.991, |
| "step": 16510 |
| }, |
| { |
| "epoch": 0.015216984807885567, |
| "grad_norm": 10.212191581726074, |
| "learning_rate": 1.9997476448424848e-07, |
| "loss": 0.9708, |
| "step": 16520 |
| }, |
| { |
| "epoch": 0.015226196057769275, |
| "grad_norm": 10.948951721191406, |
| "learning_rate": 1.999747319551472e-07, |
| "loss": 1.0237, |
| "step": 16530 |
| }, |
| { |
| "epoch": 0.015235407307652982, |
| "grad_norm": 9.896740913391113, |
| "learning_rate": 1.999746994050967e-07, |
| "loss": 1.0301, |
| "step": 16540 |
| }, |
| { |
| "epoch": 0.01524461855753669, |
| "grad_norm": 11.090027809143066, |
| "learning_rate": 1.9997466683409706e-07, |
| "loss": 1.0295, |
| "step": 16550 |
| }, |
| { |
| "epoch": 0.015253829807420398, |
| "grad_norm": 10.565263748168945, |
| "learning_rate": 1.9997463424214818e-07, |
| "loss": 0.9939, |
| "step": 16560 |
| }, |
| { |
| "epoch": 0.015263041057304107, |
| "grad_norm": 11.018289566040039, |
| "learning_rate": 1.9997460162925019e-07, |
| "loss": 0.9835, |
| "step": 16570 |
| }, |
| { |
| "epoch": 0.015272252307187815, |
| "grad_norm": 11.111709594726562, |
| "learning_rate": 1.99974568995403e-07, |
| "loss": 0.9855, |
| "step": 16580 |
| }, |
| { |
| "epoch": 0.015281463557071523, |
| "grad_norm": 10.70183277130127, |
| "learning_rate": 1.999745363406067e-07, |
| "loss": 0.9799, |
| "step": 16590 |
| }, |
| { |
| "epoch": 0.01529067480695523, |
| "grad_norm": 11.627222061157227, |
| "learning_rate": 1.999745036648612e-07, |
| "loss": 1.0439, |
| "step": 16600 |
| }, |
| { |
| "epoch": 0.015299886056838938, |
| "grad_norm": 11.132915496826172, |
| "learning_rate": 1.9997447096816655e-07, |
| "loss": 0.982, |
| "step": 16610 |
| }, |
| { |
| "epoch": 0.015309097306722646, |
| "grad_norm": 9.92930793762207, |
| "learning_rate": 1.999744382505228e-07, |
| "loss": 1.0022, |
| "step": 16620 |
| }, |
| { |
| "epoch": 0.015318308556606354, |
| "grad_norm": 10.37789535522461, |
| "learning_rate": 1.999744055119299e-07, |
| "loss": 1.024, |
| "step": 16630 |
| }, |
| { |
| "epoch": 0.015327519806490062, |
| "grad_norm": 10.162717819213867, |
| "learning_rate": 1.9997437275238788e-07, |
| "loss": 0.9883, |
| "step": 16640 |
| }, |
| { |
| "epoch": 0.015336731056373771, |
| "grad_norm": 11.870095252990723, |
| "learning_rate": 1.9997433997189675e-07, |
| "loss": 1.0127, |
| "step": 16650 |
| }, |
| { |
| "epoch": 0.015345942306257479, |
| "grad_norm": 12.083418846130371, |
| "learning_rate": 1.999743071704565e-07, |
| "loss": 1.0232, |
| "step": 16660 |
| }, |
| { |
| "epoch": 0.015355153556141187, |
| "grad_norm": 10.490384101867676, |
| "learning_rate": 1.9997427434806715e-07, |
| "loss": 0.9891, |
| "step": 16670 |
| }, |
| { |
| "epoch": 0.015364364806024895, |
| "grad_norm": 10.339552879333496, |
| "learning_rate": 1.999742415047287e-07, |
| "loss": 1.0276, |
| "step": 16680 |
| }, |
| { |
| "epoch": 0.015373576055908602, |
| "grad_norm": 9.56631851196289, |
| "learning_rate": 1.999742086404412e-07, |
| "loss": 0.986, |
| "step": 16690 |
| }, |
| { |
| "epoch": 0.01538278730579231, |
| "grad_norm": 10.870406150817871, |
| "learning_rate": 1.9997417575520456e-07, |
| "loss": 1.0202, |
| "step": 16700 |
| }, |
| { |
| "epoch": 0.015391998555676018, |
| "grad_norm": 10.942203521728516, |
| "learning_rate": 1.9997414284901887e-07, |
| "loss": 1.0454, |
| "step": 16710 |
| }, |
| { |
| "epoch": 0.015401209805559726, |
| "grad_norm": 11.052876472473145, |
| "learning_rate": 1.9997410992188412e-07, |
| "loss": 0.989, |
| "step": 16720 |
| }, |
| { |
| "epoch": 0.015410421055443433, |
| "grad_norm": 10.621703147888184, |
| "learning_rate": 1.9997407697380032e-07, |
| "loss": 0.9935, |
| "step": 16730 |
| }, |
| { |
| "epoch": 0.015419632305327143, |
| "grad_norm": 12.369471549987793, |
| "learning_rate": 1.9997404400476746e-07, |
| "loss": 1.0163, |
| "step": 16740 |
| }, |
| { |
| "epoch": 0.01542884355521085, |
| "grad_norm": 11.051840782165527, |
| "learning_rate": 1.9997401101478552e-07, |
| "loss": 0.9967, |
| "step": 16750 |
| }, |
| { |
| "epoch": 0.015438054805094558, |
| "grad_norm": 11.502120971679688, |
| "learning_rate": 1.9997397800385457e-07, |
| "loss": 1.0321, |
| "step": 16760 |
| }, |
| { |
| "epoch": 0.015447266054978266, |
| "grad_norm": 10.186074256896973, |
| "learning_rate": 1.999739449719746e-07, |
| "loss": 0.9986, |
| "step": 16770 |
| }, |
| { |
| "epoch": 0.015456477304861974, |
| "grad_norm": 14.996305465698242, |
| "learning_rate": 1.9997391191914556e-07, |
| "loss": 0.9929, |
| "step": 16780 |
| }, |
| { |
| "epoch": 0.015465688554745682, |
| "grad_norm": 9.845305442810059, |
| "learning_rate": 1.9997387884536754e-07, |
| "loss": 1.0096, |
| "step": 16790 |
| }, |
| { |
| "epoch": 0.01547489980462939, |
| "grad_norm": 10.184284210205078, |
| "learning_rate": 1.999738457506405e-07, |
| "loss": 0.9997, |
| "step": 16800 |
| }, |
| { |
| "epoch": 0.015484111054513097, |
| "grad_norm": 10.943856239318848, |
| "learning_rate": 1.9997381263496445e-07, |
| "loss": 1.0066, |
| "step": 16810 |
| }, |
| { |
| "epoch": 0.015493322304396807, |
| "grad_norm": 10.228456497192383, |
| "learning_rate": 1.999737794983394e-07, |
| "loss": 1.0307, |
| "step": 16820 |
| }, |
| { |
| "epoch": 0.015502533554280514, |
| "grad_norm": 10.381999015808105, |
| "learning_rate": 1.9997374634076537e-07, |
| "loss": 1.0084, |
| "step": 16830 |
| }, |
| { |
| "epoch": 0.015511744804164222, |
| "grad_norm": 10.954923629760742, |
| "learning_rate": 1.999737131622423e-07, |
| "loss": 0.9729, |
| "step": 16840 |
| }, |
| { |
| "epoch": 0.01552095605404793, |
| "grad_norm": 11.323572158813477, |
| "learning_rate": 1.9997367996277033e-07, |
| "loss": 0.957, |
| "step": 16850 |
| }, |
| { |
| "epoch": 0.015530167303931638, |
| "grad_norm": 13.106945037841797, |
| "learning_rate": 1.9997364674234936e-07, |
| "loss": 1.041, |
| "step": 16860 |
| }, |
| { |
| "epoch": 0.015539378553815346, |
| "grad_norm": 10.03570556640625, |
| "learning_rate": 1.999736135009794e-07, |
| "loss": 0.9723, |
| "step": 16870 |
| }, |
| { |
| "epoch": 0.015548589803699053, |
| "grad_norm": 10.501392364501953, |
| "learning_rate": 1.999735802386605e-07, |
| "loss": 0.9999, |
| "step": 16880 |
| }, |
| { |
| "epoch": 0.015557801053582761, |
| "grad_norm": 10.961532592773438, |
| "learning_rate": 1.9997354695539266e-07, |
| "loss": 0.9945, |
| "step": 16890 |
| }, |
| { |
| "epoch": 0.015567012303466469, |
| "grad_norm": 10.83249282836914, |
| "learning_rate": 1.9997351365117587e-07, |
| "loss": 1.0075, |
| "step": 16900 |
| }, |
| { |
| "epoch": 0.015576223553350178, |
| "grad_norm": 11.543451309204102, |
| "learning_rate": 1.9997348032601015e-07, |
| "loss": 1.0443, |
| "step": 16910 |
| }, |
| { |
| "epoch": 0.015585434803233886, |
| "grad_norm": 9.75156021118164, |
| "learning_rate": 1.9997344697989548e-07, |
| "loss": 0.9775, |
| "step": 16920 |
| }, |
| { |
| "epoch": 0.015594646053117594, |
| "grad_norm": 11.097611427307129, |
| "learning_rate": 1.9997341361283191e-07, |
| "loss": 1.0041, |
| "step": 16930 |
| }, |
| { |
| "epoch": 0.015603857303001302, |
| "grad_norm": 11.022612571716309, |
| "learning_rate": 1.9997338022481942e-07, |
| "loss": 1.0391, |
| "step": 16940 |
| }, |
| { |
| "epoch": 0.01561306855288501, |
| "grad_norm": 11.707490921020508, |
| "learning_rate": 1.9997334681585803e-07, |
| "loss": 1.0466, |
| "step": 16950 |
| }, |
| { |
| "epoch": 0.015622279802768717, |
| "grad_norm": 9.554755210876465, |
| "learning_rate": 1.9997331338594771e-07, |
| "loss": 1.0355, |
| "step": 16960 |
| }, |
| { |
| "epoch": 0.015631491052652425, |
| "grad_norm": 9.569435119628906, |
| "learning_rate": 1.9997327993508853e-07, |
| "loss": 0.9872, |
| "step": 16970 |
| }, |
| { |
| "epoch": 0.015640702302536134, |
| "grad_norm": 10.927450180053711, |
| "learning_rate": 1.9997324646328044e-07, |
| "loss": 1.0344, |
| "step": 16980 |
| }, |
| { |
| "epoch": 0.01564991355241984, |
| "grad_norm": 9.773367881774902, |
| "learning_rate": 1.9997321297052348e-07, |
| "loss": 0.985, |
| "step": 16990 |
| }, |
| { |
| "epoch": 0.01565912480230355, |
| "grad_norm": 11.930747985839844, |
| "learning_rate": 1.9997317945681765e-07, |
| "loss": 1.0228, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.015668336052187256, |
| "grad_norm": 9.844359397888184, |
| "learning_rate": 1.9997314592216295e-07, |
| "loss": 0.9937, |
| "step": 17010 |
| }, |
| { |
| "epoch": 0.015677547302070965, |
| "grad_norm": 20.886455535888672, |
| "learning_rate": 1.9997311236655938e-07, |
| "loss": 0.9924, |
| "step": 17020 |
| }, |
| { |
| "epoch": 0.015686758551954675, |
| "grad_norm": 10.415185928344727, |
| "learning_rate": 1.9997307879000696e-07, |
| "loss": 0.9427, |
| "step": 17030 |
| }, |
| { |
| "epoch": 0.01569596980183838, |
| "grad_norm": 10.957759857177734, |
| "learning_rate": 1.9997304519250573e-07, |
| "loss": 1.0856, |
| "step": 17040 |
| }, |
| { |
| "epoch": 0.01570518105172209, |
| "grad_norm": 13.16639518737793, |
| "learning_rate": 1.9997301157405562e-07, |
| "loss": 1.0028, |
| "step": 17050 |
| }, |
| { |
| "epoch": 0.015714392301605797, |
| "grad_norm": 9.69345760345459, |
| "learning_rate": 1.9997297793465671e-07, |
| "loss": 1.0537, |
| "step": 17060 |
| }, |
| { |
| "epoch": 0.015723603551489506, |
| "grad_norm": 14.802711486816406, |
| "learning_rate": 1.9997294427430894e-07, |
| "loss": 0.9991, |
| "step": 17070 |
| }, |
| { |
| "epoch": 0.015732814801373212, |
| "grad_norm": 11.45785903930664, |
| "learning_rate": 1.9997291059301238e-07, |
| "loss": 1.0297, |
| "step": 17080 |
| }, |
| { |
| "epoch": 0.01574202605125692, |
| "grad_norm": 11.090860366821289, |
| "learning_rate": 1.9997287689076702e-07, |
| "loss": 0.9888, |
| "step": 17090 |
| }, |
| { |
| "epoch": 0.015751237301140628, |
| "grad_norm": 11.352603912353516, |
| "learning_rate": 1.9997284316757282e-07, |
| "loss": 1.0552, |
| "step": 17100 |
| }, |
| { |
| "epoch": 0.015760448551024337, |
| "grad_norm": 9.805356979370117, |
| "learning_rate": 1.9997280942342987e-07, |
| "loss": 1.0513, |
| "step": 17110 |
| }, |
| { |
| "epoch": 0.015769659800908047, |
| "grad_norm": 10.430112838745117, |
| "learning_rate": 1.9997277565833809e-07, |
| "loss": 1.028, |
| "step": 17120 |
| }, |
| { |
| "epoch": 0.015778871050791753, |
| "grad_norm": 11.488729476928711, |
| "learning_rate": 1.9997274187229753e-07, |
| "loss": 1.046, |
| "step": 17130 |
| }, |
| { |
| "epoch": 0.015788082300675462, |
| "grad_norm": 11.830671310424805, |
| "learning_rate": 1.9997270806530824e-07, |
| "loss": 1.0309, |
| "step": 17140 |
| }, |
| { |
| "epoch": 0.015797293550559168, |
| "grad_norm": 10.939457893371582, |
| "learning_rate": 1.9997267423737015e-07, |
| "loss": 0.9708, |
| "step": 17150 |
| }, |
| { |
| "epoch": 0.015806504800442878, |
| "grad_norm": 11.219234466552734, |
| "learning_rate": 1.999726403884833e-07, |
| "loss": 0.976, |
| "step": 17160 |
| }, |
| { |
| "epoch": 0.015815716050326584, |
| "grad_norm": 10.39037799835205, |
| "learning_rate": 1.9997260651864768e-07, |
| "loss": 0.9592, |
| "step": 17170 |
| }, |
| { |
| "epoch": 0.015824927300210293, |
| "grad_norm": 10.798562049865723, |
| "learning_rate": 1.9997257262786335e-07, |
| "loss": 1.0061, |
| "step": 17180 |
| }, |
| { |
| "epoch": 0.015834138550094, |
| "grad_norm": 10.807364463806152, |
| "learning_rate": 1.9997253871613026e-07, |
| "loss": 1.0021, |
| "step": 17190 |
| }, |
| { |
| "epoch": 0.01584334979997771, |
| "grad_norm": 9.798983573913574, |
| "learning_rate": 1.9997250478344845e-07, |
| "loss": 0.9878, |
| "step": 17200 |
| }, |
| { |
| "epoch": 0.015852561049861418, |
| "grad_norm": 10.611905097961426, |
| "learning_rate": 1.9997247082981793e-07, |
| "loss": 1.0163, |
| "step": 17210 |
| }, |
| { |
| "epoch": 0.015861772299745124, |
| "grad_norm": 11.140785217285156, |
| "learning_rate": 1.999724368552387e-07, |
| "loss": 1.0074, |
| "step": 17220 |
| }, |
| { |
| "epoch": 0.015870983549628834, |
| "grad_norm": 10.84278392791748, |
| "learning_rate": 1.999724028597107e-07, |
| "loss": 0.9807, |
| "step": 17230 |
| }, |
| { |
| "epoch": 0.01588019479951254, |
| "grad_norm": 11.232892036437988, |
| "learning_rate": 1.9997236884323403e-07, |
| "loss": 1.0134, |
| "step": 17240 |
| }, |
| { |
| "epoch": 0.01588940604939625, |
| "grad_norm": 10.130370140075684, |
| "learning_rate": 1.9997233480580865e-07, |
| "loss": 1.0446, |
| "step": 17250 |
| }, |
| { |
| "epoch": 0.015898617299279955, |
| "grad_norm": 11.66458511352539, |
| "learning_rate": 1.9997230074743462e-07, |
| "loss": 1.0432, |
| "step": 17260 |
| }, |
| { |
| "epoch": 0.015907828549163665, |
| "grad_norm": 11.601715087890625, |
| "learning_rate": 1.9997226666811187e-07, |
| "loss": 1.0229, |
| "step": 17270 |
| }, |
| { |
| "epoch": 0.01591703979904737, |
| "grad_norm": 10.128191947937012, |
| "learning_rate": 1.9997223256784047e-07, |
| "loss": 1.0119, |
| "step": 17280 |
| }, |
| { |
| "epoch": 0.01592625104893108, |
| "grad_norm": 10.157410621643066, |
| "learning_rate": 1.9997219844662037e-07, |
| "loss": 0.9809, |
| "step": 17290 |
| }, |
| { |
| "epoch": 0.01593546229881479, |
| "grad_norm": 10.83198070526123, |
| "learning_rate": 1.9997216430445162e-07, |
| "loss": 1.0243, |
| "step": 17300 |
| }, |
| { |
| "epoch": 0.015944673548698496, |
| "grad_norm": 10.55180549621582, |
| "learning_rate": 1.9997213014133426e-07, |
| "loss": 1.0194, |
| "step": 17310 |
| }, |
| { |
| "epoch": 0.015953884798582205, |
| "grad_norm": 11.115482330322266, |
| "learning_rate": 1.999720959572682e-07, |
| "loss": 1.0051, |
| "step": 17320 |
| }, |
| { |
| "epoch": 0.01596309604846591, |
| "grad_norm": 10.972350120544434, |
| "learning_rate": 1.9997206175225353e-07, |
| "loss": 1.0537, |
| "step": 17330 |
| }, |
| { |
| "epoch": 0.01597230729834962, |
| "grad_norm": 11.979573249816895, |
| "learning_rate": 1.9997202752629021e-07, |
| "loss": 0.9779, |
| "step": 17340 |
| }, |
| { |
| "epoch": 0.015981518548233327, |
| "grad_norm": 11.33555793762207, |
| "learning_rate": 1.999719932793783e-07, |
| "loss": 1.0662, |
| "step": 17350 |
| }, |
| { |
| "epoch": 0.015990729798117036, |
| "grad_norm": 10.43903636932373, |
| "learning_rate": 1.9997195901151774e-07, |
| "loss": 1.005, |
| "step": 17360 |
| }, |
| { |
| "epoch": 0.015999941048000746, |
| "grad_norm": 10.561065673828125, |
| "learning_rate": 1.9997192472270858e-07, |
| "loss": 1.0744, |
| "step": 17370 |
| }, |
| { |
| "epoch": 0.016009152297884452, |
| "grad_norm": 9.711095809936523, |
| "learning_rate": 1.999718904129508e-07, |
| "loss": 1.0695, |
| "step": 17380 |
| }, |
| { |
| "epoch": 0.01601836354776816, |
| "grad_norm": 9.789228439331055, |
| "learning_rate": 1.9997185608224446e-07, |
| "loss": 1.03, |
| "step": 17390 |
| }, |
| { |
| "epoch": 0.016027574797651867, |
| "grad_norm": 10.400520324707031, |
| "learning_rate": 1.999718217305895e-07, |
| "loss": 0.9532, |
| "step": 17400 |
| }, |
| { |
| "epoch": 0.016036786047535577, |
| "grad_norm": 11.010263442993164, |
| "learning_rate": 1.9997178735798596e-07, |
| "loss": 0.9943, |
| "step": 17410 |
| }, |
| { |
| "epoch": 0.016045997297419283, |
| "grad_norm": 15.474273681640625, |
| "learning_rate": 1.9997175296443384e-07, |
| "loss": 1.0042, |
| "step": 17420 |
| }, |
| { |
| "epoch": 0.016055208547302992, |
| "grad_norm": 10.531010627746582, |
| "learning_rate": 1.9997171854993317e-07, |
| "loss": 0.9631, |
| "step": 17430 |
| }, |
| { |
| "epoch": 0.0160644197971867, |
| "grad_norm": 10.378925323486328, |
| "learning_rate": 1.9997168411448397e-07, |
| "loss": 0.9845, |
| "step": 17440 |
| }, |
| { |
| "epoch": 0.016073631047070408, |
| "grad_norm": 10.572704315185547, |
| "learning_rate": 1.9997164965808616e-07, |
| "loss": 0.9977, |
| "step": 17450 |
| }, |
| { |
| "epoch": 0.016082842296954118, |
| "grad_norm": 11.114181518554688, |
| "learning_rate": 1.9997161518073983e-07, |
| "loss": 1.009, |
| "step": 17460 |
| }, |
| { |
| "epoch": 0.016092053546837824, |
| "grad_norm": 11.28781509399414, |
| "learning_rate": 1.9997158068244497e-07, |
| "loss": 0.9976, |
| "step": 17470 |
| }, |
| { |
| "epoch": 0.016101264796721533, |
| "grad_norm": 16.513513565063477, |
| "learning_rate": 1.9997154616320158e-07, |
| "loss": 1.0203, |
| "step": 17480 |
| }, |
| { |
| "epoch": 0.01611047604660524, |
| "grad_norm": 11.120192527770996, |
| "learning_rate": 1.9997151162300966e-07, |
| "loss": 0.9948, |
| "step": 17490 |
| }, |
| { |
| "epoch": 0.01611968729648895, |
| "grad_norm": 12.403801918029785, |
| "learning_rate": 1.999714770618692e-07, |
| "loss": 1.0516, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.016128898546372655, |
| "grad_norm": 11.244853973388672, |
| "learning_rate": 1.9997144247978027e-07, |
| "loss": 0.9964, |
| "step": 17510 |
| }, |
| { |
| "epoch": 0.016138109796256364, |
| "grad_norm": 14.177594184875488, |
| "learning_rate": 1.999714078767428e-07, |
| "loss": 1.0216, |
| "step": 17520 |
| }, |
| { |
| "epoch": 0.01614732104614007, |
| "grad_norm": 10.221796035766602, |
| "learning_rate": 1.9997137325275687e-07, |
| "loss": 1.0113, |
| "step": 17530 |
| }, |
| { |
| "epoch": 0.01615653229602378, |
| "grad_norm": 10.774099349975586, |
| "learning_rate": 1.9997133860782242e-07, |
| "loss": 1.0133, |
| "step": 17540 |
| }, |
| { |
| "epoch": 0.01616574354590749, |
| "grad_norm": 10.379355430603027, |
| "learning_rate": 1.9997130394193952e-07, |
| "loss": 1.0969, |
| "step": 17550 |
| }, |
| { |
| "epoch": 0.016174954795791195, |
| "grad_norm": 10.891341209411621, |
| "learning_rate": 1.9997126925510815e-07, |
| "loss": 1.0296, |
| "step": 17560 |
| }, |
| { |
| "epoch": 0.016184166045674905, |
| "grad_norm": 13.507112503051758, |
| "learning_rate": 1.999712345473283e-07, |
| "loss": 1.057, |
| "step": 17570 |
| }, |
| { |
| "epoch": 0.01619337729555861, |
| "grad_norm": 11.034941673278809, |
| "learning_rate": 1.9997119981859998e-07, |
| "loss": 1.0096, |
| "step": 17580 |
| }, |
| { |
| "epoch": 0.01620258854544232, |
| "grad_norm": 11.114509582519531, |
| "learning_rate": 1.9997116506892323e-07, |
| "loss": 1.0029, |
| "step": 17590 |
| }, |
| { |
| "epoch": 0.016211799795326026, |
| "grad_norm": 9.768037796020508, |
| "learning_rate": 1.9997113029829798e-07, |
| "loss": 0.9696, |
| "step": 17600 |
| }, |
| { |
| "epoch": 0.016221011045209736, |
| "grad_norm": 11.427331924438477, |
| "learning_rate": 1.9997109550672437e-07, |
| "loss": 0.9929, |
| "step": 17610 |
| }, |
| { |
| "epoch": 0.016230222295093445, |
| "grad_norm": 10.357850074768066, |
| "learning_rate": 1.999710606942023e-07, |
| "loss": 1.023, |
| "step": 17620 |
| }, |
| { |
| "epoch": 0.01623943354497715, |
| "grad_norm": 11.832114219665527, |
| "learning_rate": 1.999710258607318e-07, |
| "loss": 1.0666, |
| "step": 17630 |
| }, |
| { |
| "epoch": 0.01624864479486086, |
| "grad_norm": 10.942581176757812, |
| "learning_rate": 1.999709910063129e-07, |
| "loss": 1.0151, |
| "step": 17640 |
| }, |
| { |
| "epoch": 0.016257856044744567, |
| "grad_norm": 10.847529411315918, |
| "learning_rate": 1.9997095613094559e-07, |
| "loss": 0.9968, |
| "step": 17650 |
| }, |
| { |
| "epoch": 0.016267067294628276, |
| "grad_norm": 9.716144561767578, |
| "learning_rate": 1.999709212346299e-07, |
| "loss": 0.951, |
| "step": 17660 |
| }, |
| { |
| "epoch": 0.016276278544511982, |
| "grad_norm": 10.874332427978516, |
| "learning_rate": 1.9997088631736577e-07, |
| "loss": 0.9931, |
| "step": 17670 |
| }, |
| { |
| "epoch": 0.016285489794395692, |
| "grad_norm": 9.12191390991211, |
| "learning_rate": 1.9997085137915328e-07, |
| "loss": 1.0397, |
| "step": 17680 |
| }, |
| { |
| "epoch": 0.016294701044279398, |
| "grad_norm": 10.333415031433105, |
| "learning_rate": 1.9997081641999237e-07, |
| "loss": 1.0207, |
| "step": 17690 |
| }, |
| { |
| "epoch": 0.016303912294163107, |
| "grad_norm": 13.753838539123535, |
| "learning_rate": 1.9997078143988315e-07, |
| "loss": 1.0005, |
| "step": 17700 |
| }, |
| { |
| "epoch": 0.016313123544046817, |
| "grad_norm": 10.496127128601074, |
| "learning_rate": 1.9997074643882555e-07, |
| "loss": 0.996, |
| "step": 17710 |
| }, |
| { |
| "epoch": 0.016322334793930523, |
| "grad_norm": 9.368756294250488, |
| "learning_rate": 1.9997071141681961e-07, |
| "loss": 0.9942, |
| "step": 17720 |
| }, |
| { |
| "epoch": 0.016331546043814232, |
| "grad_norm": 12.05996322631836, |
| "learning_rate": 1.9997067637386528e-07, |
| "loss": 0.9815, |
| "step": 17730 |
| }, |
| { |
| "epoch": 0.01634075729369794, |
| "grad_norm": 10.298500061035156, |
| "learning_rate": 1.999706413099626e-07, |
| "loss": 0.993, |
| "step": 17740 |
| }, |
| { |
| "epoch": 0.016349968543581648, |
| "grad_norm": 10.445028305053711, |
| "learning_rate": 1.9997060622511164e-07, |
| "loss": 0.9951, |
| "step": 17750 |
| }, |
| { |
| "epoch": 0.016359179793465354, |
| "grad_norm": 11.321356773376465, |
| "learning_rate": 1.9997057111931234e-07, |
| "loss": 1.0333, |
| "step": 17760 |
| }, |
| { |
| "epoch": 0.016368391043349063, |
| "grad_norm": 11.561808586120605, |
| "learning_rate": 1.9997053599256471e-07, |
| "loss": 0.9939, |
| "step": 17770 |
| }, |
| { |
| "epoch": 0.01637760229323277, |
| "grad_norm": 12.05006217956543, |
| "learning_rate": 1.9997050084486875e-07, |
| "loss": 0.9872, |
| "step": 17780 |
| }, |
| { |
| "epoch": 0.01638681354311648, |
| "grad_norm": 11.962194442749023, |
| "learning_rate": 1.999704656762245e-07, |
| "loss": 1.034, |
| "step": 17790 |
| }, |
| { |
| "epoch": 0.01639602479300019, |
| "grad_norm": 10.49455738067627, |
| "learning_rate": 1.9997043048663195e-07, |
| "loss": 0.9997, |
| "step": 17800 |
| }, |
| { |
| "epoch": 0.016405236042883894, |
| "grad_norm": 17.18958854675293, |
| "learning_rate": 1.9997039527609112e-07, |
| "loss": 1.0007, |
| "step": 17810 |
| }, |
| { |
| "epoch": 0.016414447292767604, |
| "grad_norm": 11.289200782775879, |
| "learning_rate": 1.9997036004460202e-07, |
| "loss": 0.9849, |
| "step": 17820 |
| }, |
| { |
| "epoch": 0.01642365854265131, |
| "grad_norm": 12.395259857177734, |
| "learning_rate": 1.999703247921646e-07, |
| "loss": 1.0259, |
| "step": 17830 |
| }, |
| { |
| "epoch": 0.01643286979253502, |
| "grad_norm": 10.041565895080566, |
| "learning_rate": 1.9997028951877893e-07, |
| "loss": 1.0227, |
| "step": 17840 |
| }, |
| { |
| "epoch": 0.016442081042418726, |
| "grad_norm": 10.784987449645996, |
| "learning_rate": 1.9997025422444502e-07, |
| "loss": 1.0137, |
| "step": 17850 |
| }, |
| { |
| "epoch": 0.016451292292302435, |
| "grad_norm": 11.328414916992188, |
| "learning_rate": 1.9997021890916285e-07, |
| "loss": 1.0021, |
| "step": 17860 |
| }, |
| { |
| "epoch": 0.01646050354218614, |
| "grad_norm": 11.163694381713867, |
| "learning_rate": 1.9997018357293244e-07, |
| "loss": 1.0082, |
| "step": 17870 |
| }, |
| { |
| "epoch": 0.01646971479206985, |
| "grad_norm": 12.4961519241333, |
| "learning_rate": 1.999701482157538e-07, |
| "loss": 1.0443, |
| "step": 17880 |
| }, |
| { |
| "epoch": 0.01647892604195356, |
| "grad_norm": 10.401840209960938, |
| "learning_rate": 1.9997011283762688e-07, |
| "loss": 1.0112, |
| "step": 17890 |
| }, |
| { |
| "epoch": 0.016488137291837266, |
| "grad_norm": 10.710625648498535, |
| "learning_rate": 1.9997007743855176e-07, |
| "loss": 1.0358, |
| "step": 17900 |
| }, |
| { |
| "epoch": 0.016497348541720976, |
| "grad_norm": 11.381387710571289, |
| "learning_rate": 1.9997004201852845e-07, |
| "loss": 1.0027, |
| "step": 17910 |
| }, |
| { |
| "epoch": 0.01650655979160468, |
| "grad_norm": 10.703309059143066, |
| "learning_rate": 1.9997000657755688e-07, |
| "loss": 0.9801, |
| "step": 17920 |
| }, |
| { |
| "epoch": 0.01651577104148839, |
| "grad_norm": 11.782756805419922, |
| "learning_rate": 1.9996997111563716e-07, |
| "loss": 1.0396, |
| "step": 17930 |
| }, |
| { |
| "epoch": 0.016524982291372097, |
| "grad_norm": 9.53162956237793, |
| "learning_rate": 1.9996993563276923e-07, |
| "loss": 1.0188, |
| "step": 17940 |
| }, |
| { |
| "epoch": 0.016534193541255807, |
| "grad_norm": 9.691423416137695, |
| "learning_rate": 1.999699001289531e-07, |
| "loss": 1.0217, |
| "step": 17950 |
| }, |
| { |
| "epoch": 0.016543404791139516, |
| "grad_norm": 10.514159202575684, |
| "learning_rate": 1.999698646041888e-07, |
| "loss": 1.0123, |
| "step": 17960 |
| }, |
| { |
| "epoch": 0.016552616041023222, |
| "grad_norm": 10.540403366088867, |
| "learning_rate": 1.9996982905847635e-07, |
| "loss": 1.0507, |
| "step": 17970 |
| }, |
| { |
| "epoch": 0.01656182729090693, |
| "grad_norm": 9.813848495483398, |
| "learning_rate": 1.999697934918157e-07, |
| "loss": 0.9792, |
| "step": 17980 |
| }, |
| { |
| "epoch": 0.016571038540790638, |
| "grad_norm": 11.38947868347168, |
| "learning_rate": 1.999697579042069e-07, |
| "loss": 1.0161, |
| "step": 17990 |
| }, |
| { |
| "epoch": 0.016580249790674347, |
| "grad_norm": 10.685944557189941, |
| "learning_rate": 1.9996972229564998e-07, |
| "loss": 1.0428, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.016589461040558053, |
| "grad_norm": 11.196683883666992, |
| "learning_rate": 1.999696866661449e-07, |
| "loss": 1.0101, |
| "step": 18010 |
| }, |
| { |
| "epoch": 0.016598672290441763, |
| "grad_norm": 10.966066360473633, |
| "learning_rate": 1.9996965101569167e-07, |
| "loss": 1.0149, |
| "step": 18020 |
| }, |
| { |
| "epoch": 0.01660788354032547, |
| "grad_norm": 9.404693603515625, |
| "learning_rate": 1.9996961534429035e-07, |
| "loss": 0.9569, |
| "step": 18030 |
| }, |
| { |
| "epoch": 0.016617094790209178, |
| "grad_norm": 10.461613655090332, |
| "learning_rate": 1.999695796519409e-07, |
| "loss": 0.9926, |
| "step": 18040 |
| }, |
| { |
| "epoch": 0.016626306040092888, |
| "grad_norm": 10.633156776428223, |
| "learning_rate": 1.999695439386433e-07, |
| "loss": 1.0169, |
| "step": 18050 |
| }, |
| { |
| "epoch": 0.016635517289976594, |
| "grad_norm": 10.073287963867188, |
| "learning_rate": 1.9996950820439765e-07, |
| "loss": 1.051, |
| "step": 18060 |
| }, |
| { |
| "epoch": 0.016644728539860303, |
| "grad_norm": 10.782968521118164, |
| "learning_rate": 1.9996947244920386e-07, |
| "loss": 1.0176, |
| "step": 18070 |
| }, |
| { |
| "epoch": 0.01665393978974401, |
| "grad_norm": 11.565352439880371, |
| "learning_rate": 1.9996943667306202e-07, |
| "loss": 1.0264, |
| "step": 18080 |
| }, |
| { |
| "epoch": 0.01666315103962772, |
| "grad_norm": 10.547243118286133, |
| "learning_rate": 1.9996940087597207e-07, |
| "loss": 0.995, |
| "step": 18090 |
| }, |
| { |
| "epoch": 0.016672362289511425, |
| "grad_norm": 10.588467597961426, |
| "learning_rate": 1.9996936505793405e-07, |
| "loss": 0.9634, |
| "step": 18100 |
| }, |
| { |
| "epoch": 0.016681573539395134, |
| "grad_norm": 10.875631332397461, |
| "learning_rate": 1.9996932921894797e-07, |
| "loss": 1.0217, |
| "step": 18110 |
| }, |
| { |
| "epoch": 0.01669078478927884, |
| "grad_norm": 10.276495933532715, |
| "learning_rate": 1.9996929335901382e-07, |
| "loss": 1.0429, |
| "step": 18120 |
| }, |
| { |
| "epoch": 0.01669999603916255, |
| "grad_norm": 10.738910675048828, |
| "learning_rate": 1.9996925747813164e-07, |
| "loss": 1.0286, |
| "step": 18130 |
| }, |
| { |
| "epoch": 0.01670920728904626, |
| "grad_norm": 10.95505142211914, |
| "learning_rate": 1.999692215763014e-07, |
| "loss": 0.9547, |
| "step": 18140 |
| }, |
| { |
| "epoch": 0.016718418538929965, |
| "grad_norm": 10.931917190551758, |
| "learning_rate": 1.9996918565352313e-07, |
| "loss": 1.016, |
| "step": 18150 |
| }, |
| { |
| "epoch": 0.016727629788813675, |
| "grad_norm": 9.601530075073242, |
| "learning_rate": 1.9996914970979682e-07, |
| "loss": 1.0277, |
| "step": 18160 |
| }, |
| { |
| "epoch": 0.01673684103869738, |
| "grad_norm": 10.496891021728516, |
| "learning_rate": 1.999691137451225e-07, |
| "loss": 1.0067, |
| "step": 18170 |
| }, |
| { |
| "epoch": 0.01674605228858109, |
| "grad_norm": 10.897998809814453, |
| "learning_rate": 1.9996907775950014e-07, |
| "loss": 0.9731, |
| "step": 18180 |
| }, |
| { |
| "epoch": 0.016755263538464796, |
| "grad_norm": 10.72037124633789, |
| "learning_rate": 1.9996904175292978e-07, |
| "loss": 1.0113, |
| "step": 18190 |
| }, |
| { |
| "epoch": 0.016764474788348506, |
| "grad_norm": 10.771594047546387, |
| "learning_rate": 1.9996900572541143e-07, |
| "loss": 0.9847, |
| "step": 18200 |
| }, |
| { |
| "epoch": 0.016773686038232215, |
| "grad_norm": 9.623784065246582, |
| "learning_rate": 1.9996896967694513e-07, |
| "loss": 0.9597, |
| "step": 18210 |
| }, |
| { |
| "epoch": 0.01678289728811592, |
| "grad_norm": 9.22213363647461, |
| "learning_rate": 1.9996893360753079e-07, |
| "loss": 1.0123, |
| "step": 18220 |
| }, |
| { |
| "epoch": 0.01679210853799963, |
| "grad_norm": 10.498910903930664, |
| "learning_rate": 1.999688975171685e-07, |
| "loss": 0.9981, |
| "step": 18230 |
| }, |
| { |
| "epoch": 0.016801319787883337, |
| "grad_norm": 11.86726188659668, |
| "learning_rate": 1.9996886140585825e-07, |
| "loss": 0.9784, |
| "step": 18240 |
| }, |
| { |
| "epoch": 0.016810531037767047, |
| "grad_norm": 10.20151424407959, |
| "learning_rate": 1.9996882527360002e-07, |
| "loss": 0.9558, |
| "step": 18250 |
| }, |
| { |
| "epoch": 0.016819742287650753, |
| "grad_norm": 9.883543968200684, |
| "learning_rate": 1.9996878912039383e-07, |
| "loss": 0.9995, |
| "step": 18260 |
| }, |
| { |
| "epoch": 0.016828953537534462, |
| "grad_norm": 9.660884857177734, |
| "learning_rate": 1.999687529462397e-07, |
| "loss": 0.9624, |
| "step": 18270 |
| }, |
| { |
| "epoch": 0.016838164787418168, |
| "grad_norm": 10.075861930847168, |
| "learning_rate": 1.9996871675113763e-07, |
| "loss": 0.9782, |
| "step": 18280 |
| }, |
| { |
| "epoch": 0.016847376037301878, |
| "grad_norm": 11.506403923034668, |
| "learning_rate": 1.9996868053508764e-07, |
| "loss": 1.0009, |
| "step": 18290 |
| }, |
| { |
| "epoch": 0.016856587287185587, |
| "grad_norm": 9.465668678283691, |
| "learning_rate": 1.9996864429808973e-07, |
| "loss": 1.0057, |
| "step": 18300 |
| }, |
| { |
| "epoch": 0.016865798537069293, |
| "grad_norm": 10.784385681152344, |
| "learning_rate": 1.9996860804014387e-07, |
| "loss": 1.0194, |
| "step": 18310 |
| }, |
| { |
| "epoch": 0.016875009786953003, |
| "grad_norm": 10.121374130249023, |
| "learning_rate": 1.9996857176125015e-07, |
| "loss": 1.0194, |
| "step": 18320 |
| }, |
| { |
| "epoch": 0.01688422103683671, |
| "grad_norm": 12.42760944366455, |
| "learning_rate": 1.999685354614085e-07, |
| "loss": 1.031, |
| "step": 18330 |
| }, |
| { |
| "epoch": 0.016893432286720418, |
| "grad_norm": 9.57375717163086, |
| "learning_rate": 1.9996849914061897e-07, |
| "loss": 1.0093, |
| "step": 18340 |
| }, |
| { |
| "epoch": 0.016902643536604124, |
| "grad_norm": 11.3381986618042, |
| "learning_rate": 1.9996846279888155e-07, |
| "loss": 1.0165, |
| "step": 18350 |
| }, |
| { |
| "epoch": 0.016911854786487834, |
| "grad_norm": 10.649567604064941, |
| "learning_rate": 1.9996842643619626e-07, |
| "loss": 1.0072, |
| "step": 18360 |
| }, |
| { |
| "epoch": 0.01692106603637154, |
| "grad_norm": 9.864683151245117, |
| "learning_rate": 1.999683900525631e-07, |
| "loss": 0.9946, |
| "step": 18370 |
| }, |
| { |
| "epoch": 0.01693027728625525, |
| "grad_norm": 18.890703201293945, |
| "learning_rate": 1.9996835364798205e-07, |
| "loss": 1.028, |
| "step": 18380 |
| }, |
| { |
| "epoch": 0.01693948853613896, |
| "grad_norm": 10.789216041564941, |
| "learning_rate": 1.9996831722245319e-07, |
| "loss": 1.0288, |
| "step": 18390 |
| }, |
| { |
| "epoch": 0.016948699786022665, |
| "grad_norm": 10.704526901245117, |
| "learning_rate": 1.9996828077597643e-07, |
| "loss": 1.0104, |
| "step": 18400 |
| }, |
| { |
| "epoch": 0.016957911035906374, |
| "grad_norm": 10.217658996582031, |
| "learning_rate": 1.999682443085519e-07, |
| "loss": 1.0185, |
| "step": 18410 |
| }, |
| { |
| "epoch": 0.01696712228579008, |
| "grad_norm": 10.113249778747559, |
| "learning_rate": 1.9996820782017948e-07, |
| "loss": 1.0264, |
| "step": 18420 |
| }, |
| { |
| "epoch": 0.01697633353567379, |
| "grad_norm": 11.169814109802246, |
| "learning_rate": 1.9996817131085923e-07, |
| "loss": 1.0059, |
| "step": 18430 |
| }, |
| { |
| "epoch": 0.016985544785557496, |
| "grad_norm": 9.928948402404785, |
| "learning_rate": 1.9996813478059117e-07, |
| "loss": 0.9983, |
| "step": 18440 |
| }, |
| { |
| "epoch": 0.016994756035441205, |
| "grad_norm": 9.365204811096191, |
| "learning_rate": 1.9996809822937535e-07, |
| "loss": 0.9487, |
| "step": 18450 |
| }, |
| { |
| "epoch": 0.01700396728532491, |
| "grad_norm": 11.024831771850586, |
| "learning_rate": 1.999680616572117e-07, |
| "loss": 0.9969, |
| "step": 18460 |
| }, |
| { |
| "epoch": 0.01701317853520862, |
| "grad_norm": 9.761519432067871, |
| "learning_rate": 1.9996802506410022e-07, |
| "loss": 0.9717, |
| "step": 18470 |
| }, |
| { |
| "epoch": 0.01702238978509233, |
| "grad_norm": 9.938976287841797, |
| "learning_rate": 1.99967988450041e-07, |
| "loss": 1.0228, |
| "step": 18480 |
| }, |
| { |
| "epoch": 0.017031601034976036, |
| "grad_norm": 10.473343849182129, |
| "learning_rate": 1.99967951815034e-07, |
| "loss": 1.0301, |
| "step": 18490 |
| }, |
| { |
| "epoch": 0.017040812284859746, |
| "grad_norm": 11.690329551696777, |
| "learning_rate": 1.999679151590792e-07, |
| "loss": 1.0049, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.017050023534743452, |
| "grad_norm": 11.461403846740723, |
| "learning_rate": 1.9996787848217664e-07, |
| "loss": 1.0285, |
| "step": 18510 |
| }, |
| { |
| "epoch": 0.01705923478462716, |
| "grad_norm": 9.831055641174316, |
| "learning_rate": 1.9996784178432637e-07, |
| "loss": 1.0113, |
| "step": 18520 |
| }, |
| { |
| "epoch": 0.017068446034510867, |
| "grad_norm": 10.868408203125, |
| "learning_rate": 1.999678050655283e-07, |
| "loss": 1.0166, |
| "step": 18530 |
| }, |
| { |
| "epoch": 0.017077657284394577, |
| "grad_norm": 29.964656829833984, |
| "learning_rate": 1.9996776832578252e-07, |
| "loss": 0.9968, |
| "step": 18540 |
| }, |
| { |
| "epoch": 0.017086868534278286, |
| "grad_norm": 10.79750919342041, |
| "learning_rate": 1.99967731565089e-07, |
| "loss": 0.9942, |
| "step": 18550 |
| }, |
| { |
| "epoch": 0.017096079784161992, |
| "grad_norm": 10.7560453414917, |
| "learning_rate": 1.9996769478344775e-07, |
| "loss": 1.0182, |
| "step": 18560 |
| }, |
| { |
| "epoch": 0.017105291034045702, |
| "grad_norm": 11.358363151550293, |
| "learning_rate": 1.999676579808588e-07, |
| "loss": 1.0265, |
| "step": 18570 |
| }, |
| { |
| "epoch": 0.017114502283929408, |
| "grad_norm": 9.79407787322998, |
| "learning_rate": 1.9996762115732214e-07, |
| "loss": 1.0013, |
| "step": 18580 |
| }, |
| { |
| "epoch": 0.017123713533813117, |
| "grad_norm": 10.081582069396973, |
| "learning_rate": 1.9996758431283776e-07, |
| "loss": 0.9603, |
| "step": 18590 |
| }, |
| { |
| "epoch": 0.017132924783696823, |
| "grad_norm": 11.185797691345215, |
| "learning_rate": 1.999675474474057e-07, |
| "loss": 1.041, |
| "step": 18600 |
| }, |
| { |
| "epoch": 0.017142136033580533, |
| "grad_norm": 10.238747596740723, |
| "learning_rate": 1.9996751056102595e-07, |
| "loss": 0.969, |
| "step": 18610 |
| }, |
| { |
| "epoch": 0.01715134728346424, |
| "grad_norm": 10.896879196166992, |
| "learning_rate": 1.999674736536985e-07, |
| "loss": 1.0477, |
| "step": 18620 |
| }, |
| { |
| "epoch": 0.01716055853334795, |
| "grad_norm": 10.28799819946289, |
| "learning_rate": 1.9996743672542342e-07, |
| "loss": 1.029, |
| "step": 18630 |
| }, |
| { |
| "epoch": 0.017169769783231658, |
| "grad_norm": 10.384296417236328, |
| "learning_rate": 1.9996739977620066e-07, |
| "loss": 1.0156, |
| "step": 18640 |
| }, |
| { |
| "epoch": 0.017178981033115364, |
| "grad_norm": 10.47171688079834, |
| "learning_rate": 1.9996736280603025e-07, |
| "loss": 1.0144, |
| "step": 18650 |
| }, |
| { |
| "epoch": 0.017188192282999074, |
| "grad_norm": 10.417061805725098, |
| "learning_rate": 1.9996732581491216e-07, |
| "loss": 0.9642, |
| "step": 18660 |
| }, |
| { |
| "epoch": 0.01719740353288278, |
| "grad_norm": 10.035117149353027, |
| "learning_rate": 1.9996728880284649e-07, |
| "loss": 0.9834, |
| "step": 18670 |
| }, |
| { |
| "epoch": 0.01720661478276649, |
| "grad_norm": 8.674405097961426, |
| "learning_rate": 1.9996725176983314e-07, |
| "loss": 0.9554, |
| "step": 18680 |
| }, |
| { |
| "epoch": 0.017215826032650195, |
| "grad_norm": 11.042734146118164, |
| "learning_rate": 1.9996721471587216e-07, |
| "loss": 0.9561, |
| "step": 18690 |
| }, |
| { |
| "epoch": 0.017225037282533905, |
| "grad_norm": 11.12697696685791, |
| "learning_rate": 1.9996717764096358e-07, |
| "loss": 0.998, |
| "step": 18700 |
| }, |
| { |
| "epoch": 0.01723424853241761, |
| "grad_norm": 13.253899574279785, |
| "learning_rate": 1.999671405451074e-07, |
| "loss": 0.9731, |
| "step": 18710 |
| }, |
| { |
| "epoch": 0.01724345978230132, |
| "grad_norm": 11.007993698120117, |
| "learning_rate": 1.9996710342830362e-07, |
| "loss": 0.9883, |
| "step": 18720 |
| }, |
| { |
| "epoch": 0.01725267103218503, |
| "grad_norm": 11.92739486694336, |
| "learning_rate": 1.9996706629055224e-07, |
| "loss": 1.046, |
| "step": 18730 |
| }, |
| { |
| "epoch": 0.017261882282068736, |
| "grad_norm": 11.663958549499512, |
| "learning_rate": 1.9996702913185328e-07, |
| "loss": 0.9861, |
| "step": 18740 |
| }, |
| { |
| "epoch": 0.017271093531952445, |
| "grad_norm": 11.403138160705566, |
| "learning_rate": 1.9996699195220675e-07, |
| "loss": 0.9741, |
| "step": 18750 |
| }, |
| { |
| "epoch": 0.01728030478183615, |
| "grad_norm": 10.141386032104492, |
| "learning_rate": 1.9996695475161265e-07, |
| "loss": 1.0158, |
| "step": 18760 |
| }, |
| { |
| "epoch": 0.01728951603171986, |
| "grad_norm": 10.828715324401855, |
| "learning_rate": 1.9996691753007097e-07, |
| "loss": 0.9863, |
| "step": 18770 |
| }, |
| { |
| "epoch": 0.017298727281603567, |
| "grad_norm": 10.49693489074707, |
| "learning_rate": 1.9996688028758176e-07, |
| "loss": 1.0407, |
| "step": 18780 |
| }, |
| { |
| "epoch": 0.017307938531487276, |
| "grad_norm": 9.789341926574707, |
| "learning_rate": 1.9996684302414496e-07, |
| "loss": 1.0189, |
| "step": 18790 |
| }, |
| { |
| "epoch": 0.017317149781370986, |
| "grad_norm": 9.121475219726562, |
| "learning_rate": 1.9996680573976069e-07, |
| "loss": 0.9803, |
| "step": 18800 |
| }, |
| { |
| "epoch": 0.017326361031254692, |
| "grad_norm": 9.974238395690918, |
| "learning_rate": 1.9996676843442884e-07, |
| "loss": 0.9976, |
| "step": 18810 |
| }, |
| { |
| "epoch": 0.0173355722811384, |
| "grad_norm": 9.982102394104004, |
| "learning_rate": 1.9996673110814947e-07, |
| "loss": 1.0368, |
| "step": 18820 |
| }, |
| { |
| "epoch": 0.017344783531022107, |
| "grad_norm": 11.536084175109863, |
| "learning_rate": 1.999666937609226e-07, |
| "loss": 1.0355, |
| "step": 18830 |
| }, |
| { |
| "epoch": 0.017353994780905817, |
| "grad_norm": 9.691497802734375, |
| "learning_rate": 1.9996665639274824e-07, |
| "loss": 0.9907, |
| "step": 18840 |
| }, |
| { |
| "epoch": 0.017363206030789523, |
| "grad_norm": 9.890563011169434, |
| "learning_rate": 1.9996661900362633e-07, |
| "loss": 0.9981, |
| "step": 18850 |
| }, |
| { |
| "epoch": 0.017372417280673232, |
| "grad_norm": 10.328390121459961, |
| "learning_rate": 1.9996658159355698e-07, |
| "loss": 0.9602, |
| "step": 18860 |
| }, |
| { |
| "epoch": 0.01738162853055694, |
| "grad_norm": 10.441732406616211, |
| "learning_rate": 1.999665441625401e-07, |
| "loss": 0.9705, |
| "step": 18870 |
| }, |
| { |
| "epoch": 0.017390839780440648, |
| "grad_norm": 11.1194429397583, |
| "learning_rate": 1.9996650671057577e-07, |
| "loss": 1.0097, |
| "step": 18880 |
| }, |
| { |
| "epoch": 0.017400051030324357, |
| "grad_norm": 10.592708587646484, |
| "learning_rate": 1.9996646923766398e-07, |
| "loss": 1.0186, |
| "step": 18890 |
| }, |
| { |
| "epoch": 0.017409262280208063, |
| "grad_norm": 11.26917839050293, |
| "learning_rate": 1.9996643174380473e-07, |
| "loss": 1.0496, |
| "step": 18900 |
| }, |
| { |
| "epoch": 0.017418473530091773, |
| "grad_norm": 11.008471488952637, |
| "learning_rate": 1.99966394228998e-07, |
| "loss": 0.9826, |
| "step": 18910 |
| }, |
| { |
| "epoch": 0.01742768477997548, |
| "grad_norm": 11.859643936157227, |
| "learning_rate": 1.9996635669324387e-07, |
| "loss": 0.9868, |
| "step": 18920 |
| }, |
| { |
| "epoch": 0.01743689602985919, |
| "grad_norm": 10.770267486572266, |
| "learning_rate": 1.9996631913654224e-07, |
| "loss": 1.0076, |
| "step": 18930 |
| }, |
| { |
| "epoch": 0.017446107279742894, |
| "grad_norm": 9.717227935791016, |
| "learning_rate": 1.9996628155889327e-07, |
| "loss": 1.0021, |
| "step": 18940 |
| }, |
| { |
| "epoch": 0.017455318529626604, |
| "grad_norm": 9.726884841918945, |
| "learning_rate": 1.999662439602968e-07, |
| "loss": 1.0084, |
| "step": 18950 |
| }, |
| { |
| "epoch": 0.01746452977951031, |
| "grad_norm": 11.124958038330078, |
| "learning_rate": 1.9996620634075298e-07, |
| "loss": 0.9849, |
| "step": 18960 |
| }, |
| { |
| "epoch": 0.01747374102939402, |
| "grad_norm": 15.108135223388672, |
| "learning_rate": 1.999661687002617e-07, |
| "loss": 0.9291, |
| "step": 18970 |
| }, |
| { |
| "epoch": 0.01748295227927773, |
| "grad_norm": 9.424015998840332, |
| "learning_rate": 1.9996613103882302e-07, |
| "loss": 0.9595, |
| "step": 18980 |
| }, |
| { |
| "epoch": 0.017492163529161435, |
| "grad_norm": 9.753236770629883, |
| "learning_rate": 1.99966093356437e-07, |
| "loss": 1.0274, |
| "step": 18990 |
| }, |
| { |
| "epoch": 0.017501374779045144, |
| "grad_norm": 11.57945442199707, |
| "learning_rate": 1.9996605565310358e-07, |
| "loss": 0.9736, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.01751058602892885, |
| "grad_norm": 10.876665115356445, |
| "learning_rate": 1.9996601792882278e-07, |
| "loss": 1.0045, |
| "step": 19010 |
| }, |
| { |
| "epoch": 0.01751979727881256, |
| "grad_norm": 11.156282424926758, |
| "learning_rate": 1.9996598018359462e-07, |
| "loss": 1.0478, |
| "step": 19020 |
| }, |
| { |
| "epoch": 0.017529008528696266, |
| "grad_norm": 10.212468147277832, |
| "learning_rate": 1.999659424174191e-07, |
| "loss": 1.0113, |
| "step": 19030 |
| }, |
| { |
| "epoch": 0.017538219778579976, |
| "grad_norm": 12.114211082458496, |
| "learning_rate": 1.9996590463029622e-07, |
| "loss": 0.9806, |
| "step": 19040 |
| }, |
| { |
| "epoch": 0.01754743102846368, |
| "grad_norm": 10.537737846374512, |
| "learning_rate": 1.99965866822226e-07, |
| "loss": 1.0092, |
| "step": 19050 |
| }, |
| { |
| "epoch": 0.01755664227834739, |
| "grad_norm": 10.569214820861816, |
| "learning_rate": 1.9996582899320846e-07, |
| "loss": 1.0267, |
| "step": 19060 |
| }, |
| { |
| "epoch": 0.0175658535282311, |
| "grad_norm": 11.58995532989502, |
| "learning_rate": 1.9996579114324358e-07, |
| "loss": 1.0152, |
| "step": 19070 |
| }, |
| { |
| "epoch": 0.017575064778114807, |
| "grad_norm": 12.578239440917969, |
| "learning_rate": 1.9996575327233137e-07, |
| "loss": 1.0148, |
| "step": 19080 |
| }, |
| { |
| "epoch": 0.017584276027998516, |
| "grad_norm": 10.457140922546387, |
| "learning_rate": 1.999657153804719e-07, |
| "loss": 0.9818, |
| "step": 19090 |
| }, |
| { |
| "epoch": 0.017593487277882222, |
| "grad_norm": 10.147584915161133, |
| "learning_rate": 1.999656774676651e-07, |
| "loss": 1.0279, |
| "step": 19100 |
| }, |
| { |
| "epoch": 0.01760269852776593, |
| "grad_norm": 10.02499008178711, |
| "learning_rate": 1.99965639533911e-07, |
| "loss": 0.9573, |
| "step": 19110 |
| }, |
| { |
| "epoch": 0.017611909777649638, |
| "grad_norm": 10.744659423828125, |
| "learning_rate": 1.9996560157920964e-07, |
| "loss": 0.9836, |
| "step": 19120 |
| }, |
| { |
| "epoch": 0.017621121027533347, |
| "grad_norm": 10.956788063049316, |
| "learning_rate": 1.9996556360356096e-07, |
| "loss": 1.0121, |
| "step": 19130 |
| }, |
| { |
| "epoch": 0.017630332277417057, |
| "grad_norm": 10.273798942565918, |
| "learning_rate": 1.9996552560696505e-07, |
| "loss": 1.0021, |
| "step": 19140 |
| }, |
| { |
| "epoch": 0.017639543527300763, |
| "grad_norm": 10.512602806091309, |
| "learning_rate": 1.9996548758942185e-07, |
| "loss": 1.0041, |
| "step": 19150 |
| }, |
| { |
| "epoch": 0.017648754777184472, |
| "grad_norm": 11.54510498046875, |
| "learning_rate": 1.999654495509314e-07, |
| "loss": 0.9647, |
| "step": 19160 |
| }, |
| { |
| "epoch": 0.017657966027068178, |
| "grad_norm": 8.784961700439453, |
| "learning_rate": 1.999654114914937e-07, |
| "loss": 1.0065, |
| "step": 19170 |
| }, |
| { |
| "epoch": 0.017667177276951888, |
| "grad_norm": 10.612838745117188, |
| "learning_rate": 1.9996537341110877e-07, |
| "loss": 1.0613, |
| "step": 19180 |
| }, |
| { |
| "epoch": 0.017676388526835594, |
| "grad_norm": 10.087254524230957, |
| "learning_rate": 1.9996533530977663e-07, |
| "loss": 0.9801, |
| "step": 19190 |
| }, |
| { |
| "epoch": 0.017685599776719303, |
| "grad_norm": 9.54832649230957, |
| "learning_rate": 1.9996529718749725e-07, |
| "loss": 1.0092, |
| "step": 19200 |
| }, |
| { |
| "epoch": 0.01769481102660301, |
| "grad_norm": 9.667723655700684, |
| "learning_rate": 1.9996525904427064e-07, |
| "loss": 0.999, |
| "step": 19210 |
| }, |
| { |
| "epoch": 0.01770402227648672, |
| "grad_norm": 11.09756851196289, |
| "learning_rate": 1.9996522088009683e-07, |
| "loss": 1.0046, |
| "step": 19220 |
| }, |
| { |
| "epoch": 0.017713233526370428, |
| "grad_norm": 12.496501922607422, |
| "learning_rate": 1.9996518269497585e-07, |
| "loss": 1.0048, |
| "step": 19230 |
| }, |
| { |
| "epoch": 0.017722444776254134, |
| "grad_norm": 9.831335067749023, |
| "learning_rate": 1.9996514448890765e-07, |
| "loss": 1.0108, |
| "step": 19240 |
| }, |
| { |
| "epoch": 0.017731656026137844, |
| "grad_norm": 11.670632362365723, |
| "learning_rate": 1.9996510626189228e-07, |
| "loss": 1.0413, |
| "step": 19250 |
| }, |
| { |
| "epoch": 0.01774086727602155, |
| "grad_norm": 12.234903335571289, |
| "learning_rate": 1.999650680139297e-07, |
| "loss": 1.0112, |
| "step": 19260 |
| }, |
| { |
| "epoch": 0.01775007852590526, |
| "grad_norm": 9.922277450561523, |
| "learning_rate": 1.9996502974502e-07, |
| "loss": 0.9897, |
| "step": 19270 |
| }, |
| { |
| "epoch": 0.017759289775788965, |
| "grad_norm": 12.163166999816895, |
| "learning_rate": 1.9996499145516316e-07, |
| "loss": 1.0155, |
| "step": 19280 |
| }, |
| { |
| "epoch": 0.017768501025672675, |
| "grad_norm": 9.885821342468262, |
| "learning_rate": 1.999649531443591e-07, |
| "loss": 0.9988, |
| "step": 19290 |
| }, |
| { |
| "epoch": 0.01777771227555638, |
| "grad_norm": 10.334716796875, |
| "learning_rate": 1.9996491481260795e-07, |
| "loss": 1.0045, |
| "step": 19300 |
| }, |
| { |
| "epoch": 0.01778692352544009, |
| "grad_norm": 9.623705863952637, |
| "learning_rate": 1.9996487645990965e-07, |
| "loss": 1.0123, |
| "step": 19310 |
| }, |
| { |
| "epoch": 0.0177961347753238, |
| "grad_norm": 10.25127124786377, |
| "learning_rate": 1.9996483808626424e-07, |
| "loss": 1.0297, |
| "step": 19320 |
| }, |
| { |
| "epoch": 0.017805346025207506, |
| "grad_norm": 11.38561725616455, |
| "learning_rate": 1.999647996916717e-07, |
| "loss": 0.9951, |
| "step": 19330 |
| }, |
| { |
| "epoch": 0.017814557275091215, |
| "grad_norm": 10.722535133361816, |
| "learning_rate": 1.99964761276132e-07, |
| "loss": 1.0008, |
| "step": 19340 |
| }, |
| { |
| "epoch": 0.01782376852497492, |
| "grad_norm": 11.43651294708252, |
| "learning_rate": 1.9996472283964526e-07, |
| "loss": 0.9767, |
| "step": 19350 |
| }, |
| { |
| "epoch": 0.01783297977485863, |
| "grad_norm": 9.98664379119873, |
| "learning_rate": 1.999646843822114e-07, |
| "loss": 1.0245, |
| "step": 19360 |
| }, |
| { |
| "epoch": 0.017842191024742337, |
| "grad_norm": 12.64993953704834, |
| "learning_rate": 1.999646459038305e-07, |
| "loss": 1.0491, |
| "step": 19370 |
| }, |
| { |
| "epoch": 0.017851402274626046, |
| "grad_norm": 10.285564422607422, |
| "learning_rate": 1.999646074045025e-07, |
| "loss": 0.981, |
| "step": 19380 |
| }, |
| { |
| "epoch": 0.017860613524509756, |
| "grad_norm": 12.62157154083252, |
| "learning_rate": 1.9996456888422738e-07, |
| "loss": 1.0192, |
| "step": 19390 |
| }, |
| { |
| "epoch": 0.017869824774393462, |
| "grad_norm": 9.622283935546875, |
| "learning_rate": 1.9996453034300527e-07, |
| "loss": 0.9707, |
| "step": 19400 |
| }, |
| { |
| "epoch": 0.01787903602427717, |
| "grad_norm": 10.441496849060059, |
| "learning_rate": 1.9996449178083606e-07, |
| "loss": 0.9883, |
| "step": 19410 |
| }, |
| { |
| "epoch": 0.017888247274160878, |
| "grad_norm": 10.806499481201172, |
| "learning_rate": 1.999644531977198e-07, |
| "loss": 1.0074, |
| "step": 19420 |
| }, |
| { |
| "epoch": 0.017897458524044587, |
| "grad_norm": 10.985054016113281, |
| "learning_rate": 1.9996441459365654e-07, |
| "loss": 0.968, |
| "step": 19430 |
| }, |
| { |
| "epoch": 0.017906669773928293, |
| "grad_norm": 9.944632530212402, |
| "learning_rate": 1.9996437596864625e-07, |
| "loss": 0.9708, |
| "step": 19440 |
| }, |
| { |
| "epoch": 0.017915881023812003, |
| "grad_norm": 10.183022499084473, |
| "learning_rate": 1.9996433732268888e-07, |
| "loss": 1.0128, |
| "step": 19450 |
| }, |
| { |
| "epoch": 0.01792509227369571, |
| "grad_norm": 11.46110725402832, |
| "learning_rate": 1.9996429865578455e-07, |
| "loss": 1.0039, |
| "step": 19460 |
| }, |
| { |
| "epoch": 0.017934303523579418, |
| "grad_norm": 9.25991439819336, |
| "learning_rate": 1.999642599679332e-07, |
| "loss": 1.0326, |
| "step": 19470 |
| }, |
| { |
| "epoch": 0.017943514773463128, |
| "grad_norm": 14.663468360900879, |
| "learning_rate": 1.9996422125913486e-07, |
| "loss": 1.0334, |
| "step": 19480 |
| }, |
| { |
| "epoch": 0.017952726023346834, |
| "grad_norm": 9.958480834960938, |
| "learning_rate": 1.999641825293895e-07, |
| "loss": 0.9818, |
| "step": 19490 |
| }, |
| { |
| "epoch": 0.017961937273230543, |
| "grad_norm": 10.998751640319824, |
| "learning_rate": 1.9996414377869718e-07, |
| "loss": 0.9693, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.01797114852311425, |
| "grad_norm": 9.62633228302002, |
| "learning_rate": 1.9996410500705792e-07, |
| "loss": 0.975, |
| "step": 19510 |
| }, |
| { |
| "epoch": 0.01798035977299796, |
| "grad_norm": 11.86374568939209, |
| "learning_rate": 1.9996406621447166e-07, |
| "loss": 1.0388, |
| "step": 19520 |
| }, |
| { |
| "epoch": 0.017989571022881665, |
| "grad_norm": 10.862321853637695, |
| "learning_rate": 1.9996402740093846e-07, |
| "loss": 1.0014, |
| "step": 19530 |
| }, |
| { |
| "epoch": 0.017998782272765374, |
| "grad_norm": 10.698342323303223, |
| "learning_rate": 1.999639885664583e-07, |
| "loss": 0.9656, |
| "step": 19540 |
| }, |
| { |
| "epoch": 0.01800799352264908, |
| "grad_norm": 10.943075180053711, |
| "learning_rate": 1.9996394971103122e-07, |
| "loss": 1.0483, |
| "step": 19550 |
| }, |
| { |
| "epoch": 0.01801720477253279, |
| "grad_norm": 10.057327270507812, |
| "learning_rate": 1.9996391083465718e-07, |
| "loss": 1.0081, |
| "step": 19560 |
| }, |
| { |
| "epoch": 0.0180264160224165, |
| "grad_norm": 9.806025505065918, |
| "learning_rate": 1.9996387193733623e-07, |
| "loss": 0.9818, |
| "step": 19570 |
| }, |
| { |
| "epoch": 0.018035627272300205, |
| "grad_norm": 10.33084487915039, |
| "learning_rate": 1.9996383301906833e-07, |
| "loss": 1.0038, |
| "step": 19580 |
| }, |
| { |
| "epoch": 0.018044838522183915, |
| "grad_norm": 11.348193168640137, |
| "learning_rate": 1.9996379407985358e-07, |
| "loss": 1.0678, |
| "step": 19590 |
| }, |
| { |
| "epoch": 0.01805404977206762, |
| "grad_norm": 9.638917922973633, |
| "learning_rate": 1.999637551196919e-07, |
| "loss": 1.0048, |
| "step": 19600 |
| }, |
| { |
| "epoch": 0.01806326102195133, |
| "grad_norm": 16.48502540588379, |
| "learning_rate": 1.9996371613858333e-07, |
| "loss": 0.9461, |
| "step": 19610 |
| }, |
| { |
| "epoch": 0.018072472271835036, |
| "grad_norm": 11.616415023803711, |
| "learning_rate": 1.999636771365279e-07, |
| "loss": 1.0, |
| "step": 19620 |
| }, |
| { |
| "epoch": 0.018081683521718746, |
| "grad_norm": 10.420519828796387, |
| "learning_rate": 1.9996363811352554e-07, |
| "loss": 0.9511, |
| "step": 19630 |
| }, |
| { |
| "epoch": 0.018090894771602452, |
| "grad_norm": 11.778324127197266, |
| "learning_rate": 1.9996359906957635e-07, |
| "loss": 1.0267, |
| "step": 19640 |
| }, |
| { |
| "epoch": 0.01810010602148616, |
| "grad_norm": 10.854666709899902, |
| "learning_rate": 1.999635600046803e-07, |
| "loss": 1.0082, |
| "step": 19650 |
| }, |
| { |
| "epoch": 0.01810931727136987, |
| "grad_norm": 10.418573379516602, |
| "learning_rate": 1.9996352091883742e-07, |
| "loss": 1.003, |
| "step": 19660 |
| }, |
| { |
| "epoch": 0.018118528521253577, |
| "grad_norm": 11.318225860595703, |
| "learning_rate": 1.9996348181204768e-07, |
| "loss": 0.9811, |
| "step": 19670 |
| }, |
| { |
| "epoch": 0.018127739771137286, |
| "grad_norm": 10.9239501953125, |
| "learning_rate": 1.9996344268431108e-07, |
| "loss": 0.9729, |
| "step": 19680 |
| }, |
| { |
| "epoch": 0.018136951021020992, |
| "grad_norm": 9.429734230041504, |
| "learning_rate": 1.9996340353562767e-07, |
| "loss": 1.0038, |
| "step": 19690 |
| }, |
| { |
| "epoch": 0.018146162270904702, |
| "grad_norm": 9.725273132324219, |
| "learning_rate": 1.9996336436599748e-07, |
| "loss": 1.0057, |
| "step": 19700 |
| }, |
| { |
| "epoch": 0.018155373520788408, |
| "grad_norm": 10.725884437561035, |
| "learning_rate": 1.9996332517542046e-07, |
| "loss": 0.965, |
| "step": 19710 |
| }, |
| { |
| "epoch": 0.018164584770672117, |
| "grad_norm": 10.485133171081543, |
| "learning_rate": 1.9996328596389663e-07, |
| "loss": 1.0153, |
| "step": 19720 |
| }, |
| { |
| "epoch": 0.018173796020555827, |
| "grad_norm": 10.119484901428223, |
| "learning_rate": 1.99963246731426e-07, |
| "loss": 0.9967, |
| "step": 19730 |
| }, |
| { |
| "epoch": 0.018183007270439533, |
| "grad_norm": 11.0701322555542, |
| "learning_rate": 1.9996320747800858e-07, |
| "loss": 0.9866, |
| "step": 19740 |
| }, |
| { |
| "epoch": 0.018192218520323242, |
| "grad_norm": 10.444910049438477, |
| "learning_rate": 1.999631682036444e-07, |
| "loss": 0.9404, |
| "step": 19750 |
| }, |
| { |
| "epoch": 0.01820142977020695, |
| "grad_norm": 10.108673095703125, |
| "learning_rate": 1.9996312890833345e-07, |
| "loss": 0.9851, |
| "step": 19760 |
| }, |
| { |
| "epoch": 0.018210641020090658, |
| "grad_norm": 10.55014705657959, |
| "learning_rate": 1.9996308959207572e-07, |
| "loss": 1.0128, |
| "step": 19770 |
| }, |
| { |
| "epoch": 0.018219852269974364, |
| "grad_norm": 11.285462379455566, |
| "learning_rate": 1.9996305025487129e-07, |
| "loss": 1.0094, |
| "step": 19780 |
| }, |
| { |
| "epoch": 0.018229063519858073, |
| "grad_norm": 10.655933380126953, |
| "learning_rate": 1.9996301089672007e-07, |
| "loss": 0.9822, |
| "step": 19790 |
| }, |
| { |
| "epoch": 0.01823827476974178, |
| "grad_norm": 10.46581745147705, |
| "learning_rate": 1.9996297151762213e-07, |
| "loss": 1.0305, |
| "step": 19800 |
| }, |
| { |
| "epoch": 0.01824748601962549, |
| "grad_norm": 10.82480525970459, |
| "learning_rate": 1.9996293211757746e-07, |
| "loss": 1.0237, |
| "step": 19810 |
| }, |
| { |
| "epoch": 0.0182566972695092, |
| "grad_norm": 10.655744552612305, |
| "learning_rate": 1.9996289269658609e-07, |
| "loss": 0.961, |
| "step": 19820 |
| }, |
| { |
| "epoch": 0.018265908519392905, |
| "grad_norm": 10.413631439208984, |
| "learning_rate": 1.9996285325464796e-07, |
| "loss": 1.0156, |
| "step": 19830 |
| }, |
| { |
| "epoch": 0.018275119769276614, |
| "grad_norm": 11.191102981567383, |
| "learning_rate": 1.9996281379176319e-07, |
| "loss": 1.003, |
| "step": 19840 |
| }, |
| { |
| "epoch": 0.01828433101916032, |
| "grad_norm": 13.509838104248047, |
| "learning_rate": 1.9996277430793168e-07, |
| "loss": 1.0564, |
| "step": 19850 |
| }, |
| { |
| "epoch": 0.01829354226904403, |
| "grad_norm": 9.08808708190918, |
| "learning_rate": 1.999627348031535e-07, |
| "loss": 0.943, |
| "step": 19860 |
| }, |
| { |
| "epoch": 0.018302753518927736, |
| "grad_norm": 10.271145820617676, |
| "learning_rate": 1.9996269527742866e-07, |
| "loss": 0.9976, |
| "step": 19870 |
| }, |
| { |
| "epoch": 0.018311964768811445, |
| "grad_norm": 14.261909484863281, |
| "learning_rate": 1.9996265573075713e-07, |
| "loss": 0.9864, |
| "step": 19880 |
| }, |
| { |
| "epoch": 0.01832117601869515, |
| "grad_norm": 10.179832458496094, |
| "learning_rate": 1.9996261616313895e-07, |
| "loss": 0.9943, |
| "step": 19890 |
| }, |
| { |
| "epoch": 0.01833038726857886, |
| "grad_norm": 11.026870727539062, |
| "learning_rate": 1.999625765745741e-07, |
| "loss": 1.0193, |
| "step": 19900 |
| }, |
| { |
| "epoch": 0.01833959851846257, |
| "grad_norm": 10.927704811096191, |
| "learning_rate": 1.9996253696506263e-07, |
| "loss": 1.0108, |
| "step": 19910 |
| }, |
| { |
| "epoch": 0.018348809768346276, |
| "grad_norm": 11.150653839111328, |
| "learning_rate": 1.9996249733460451e-07, |
| "loss": 0.9772, |
| "step": 19920 |
| }, |
| { |
| "epoch": 0.018358021018229986, |
| "grad_norm": 9.775253295898438, |
| "learning_rate": 1.9996245768319975e-07, |
| "loss": 1.0049, |
| "step": 19930 |
| }, |
| { |
| "epoch": 0.01836723226811369, |
| "grad_norm": 10.711517333984375, |
| "learning_rate": 1.9996241801084838e-07, |
| "loss": 0.9963, |
| "step": 19940 |
| }, |
| { |
| "epoch": 0.0183764435179974, |
| "grad_norm": 10.36391544342041, |
| "learning_rate": 1.999623783175504e-07, |
| "loss": 1.0061, |
| "step": 19950 |
| }, |
| { |
| "epoch": 0.018385654767881107, |
| "grad_norm": 8.742776870727539, |
| "learning_rate": 1.9996233860330584e-07, |
| "loss": 0.9852, |
| "step": 19960 |
| }, |
| { |
| "epoch": 0.018394866017764817, |
| "grad_norm": 10.622302055358887, |
| "learning_rate": 1.9996229886811467e-07, |
| "loss": 0.9577, |
| "step": 19970 |
| }, |
| { |
| "epoch": 0.018404077267648526, |
| "grad_norm": 10.329398155212402, |
| "learning_rate": 1.999622591119769e-07, |
| "loss": 0.9682, |
| "step": 19980 |
| }, |
| { |
| "epoch": 0.018413288517532232, |
| "grad_norm": 11.7884521484375, |
| "learning_rate": 1.9996221933489256e-07, |
| "loss": 1.0301, |
| "step": 19990 |
| }, |
| { |
| "epoch": 0.01842249976741594, |
| "grad_norm": 11.110326766967773, |
| "learning_rate": 1.9996217953686165e-07, |
| "loss": 0.9731, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.018431711017299648, |
| "grad_norm": 10.731719970703125, |
| "learning_rate": 1.999621397178842e-07, |
| "loss": 1.0008, |
| "step": 20010 |
| }, |
| { |
| "epoch": 0.018440922267183357, |
| "grad_norm": 9.869799613952637, |
| "learning_rate": 1.9996209987796017e-07, |
| "loss": 0.9524, |
| "step": 20020 |
| }, |
| { |
| "epoch": 0.018450133517067063, |
| "grad_norm": 11.25899600982666, |
| "learning_rate": 1.999620600170896e-07, |
| "loss": 1.015, |
| "step": 20030 |
| }, |
| { |
| "epoch": 0.018459344766950773, |
| "grad_norm": 10.923614501953125, |
| "learning_rate": 1.999620201352725e-07, |
| "loss": 0.9672, |
| "step": 20040 |
| }, |
| { |
| "epoch": 0.01846855601683448, |
| "grad_norm": 10.660486221313477, |
| "learning_rate": 1.9996198023250888e-07, |
| "loss": 0.9891, |
| "step": 20050 |
| }, |
| { |
| "epoch": 0.01847776726671819, |
| "grad_norm": 10.547919273376465, |
| "learning_rate": 1.9996194030879875e-07, |
| "loss": 0.9988, |
| "step": 20060 |
| }, |
| { |
| "epoch": 0.018486978516601898, |
| "grad_norm": 10.213296890258789, |
| "learning_rate": 1.999619003641421e-07, |
| "loss": 1.0069, |
| "step": 20070 |
| }, |
| { |
| "epoch": 0.018496189766485604, |
| "grad_norm": 10.62246322631836, |
| "learning_rate": 1.999618603985389e-07, |
| "loss": 1.0336, |
| "step": 20080 |
| }, |
| { |
| "epoch": 0.018505401016369313, |
| "grad_norm": 9.9528226852417, |
| "learning_rate": 1.9996182041198926e-07, |
| "loss": 0.9821, |
| "step": 20090 |
| }, |
| { |
| "epoch": 0.01851461226625302, |
| "grad_norm": 10.348485946655273, |
| "learning_rate": 1.9996178040449312e-07, |
| "loss": 1.027, |
| "step": 20100 |
| }, |
| { |
| "epoch": 0.01852382351613673, |
| "grad_norm": 9.777131080627441, |
| "learning_rate": 1.999617403760505e-07, |
| "loss": 0.9917, |
| "step": 20110 |
| }, |
| { |
| "epoch": 0.018533034766020435, |
| "grad_norm": 11.688217163085938, |
| "learning_rate": 1.9996170032666143e-07, |
| "loss": 0.9887, |
| "step": 20120 |
| }, |
| { |
| "epoch": 0.018542246015904144, |
| "grad_norm": 10.454413414001465, |
| "learning_rate": 1.999616602563259e-07, |
| "loss": 1.0177, |
| "step": 20130 |
| }, |
| { |
| "epoch": 0.01855145726578785, |
| "grad_norm": 10.621447563171387, |
| "learning_rate": 1.9996162016504387e-07, |
| "loss": 0.9843, |
| "step": 20140 |
| }, |
| { |
| "epoch": 0.01856066851567156, |
| "grad_norm": 10.644526481628418, |
| "learning_rate": 1.9996158005281543e-07, |
| "loss": 1.0162, |
| "step": 20150 |
| }, |
| { |
| "epoch": 0.01856987976555527, |
| "grad_norm": 10.924078941345215, |
| "learning_rate": 1.9996153991964055e-07, |
| "loss": 1.0017, |
| "step": 20160 |
| }, |
| { |
| "epoch": 0.018579091015438975, |
| "grad_norm": 10.699383735656738, |
| "learning_rate": 1.9996149976551924e-07, |
| "loss": 0.9726, |
| "step": 20170 |
| }, |
| { |
| "epoch": 0.018588302265322685, |
| "grad_norm": 10.478154182434082, |
| "learning_rate": 1.9996145959045152e-07, |
| "loss": 0.9788, |
| "step": 20180 |
| }, |
| { |
| "epoch": 0.01859751351520639, |
| "grad_norm": 12.824461936950684, |
| "learning_rate": 1.999614193944374e-07, |
| "loss": 0.9782, |
| "step": 20190 |
| }, |
| { |
| "epoch": 0.0186067247650901, |
| "grad_norm": 11.456897735595703, |
| "learning_rate": 1.9996137917747685e-07, |
| "loss": 0.9728, |
| "step": 20200 |
| }, |
| { |
| "epoch": 0.018615936014973807, |
| "grad_norm": 9.479973793029785, |
| "learning_rate": 1.9996133893956993e-07, |
| "loss": 0.9745, |
| "step": 20210 |
| }, |
| { |
| "epoch": 0.018625147264857516, |
| "grad_norm": 9.855260848999023, |
| "learning_rate": 1.999612986807166e-07, |
| "loss": 1.0221, |
| "step": 20220 |
| }, |
| { |
| "epoch": 0.018634358514741222, |
| "grad_norm": 11.735665321350098, |
| "learning_rate": 1.999612584009169e-07, |
| "loss": 0.9665, |
| "step": 20230 |
| }, |
| { |
| "epoch": 0.01864356976462493, |
| "grad_norm": 11.804895401000977, |
| "learning_rate": 1.9996121810017085e-07, |
| "loss": 1.0115, |
| "step": 20240 |
| }, |
| { |
| "epoch": 0.01865278101450864, |
| "grad_norm": 10.740562438964844, |
| "learning_rate": 1.9996117777847842e-07, |
| "loss": 0.9994, |
| "step": 20250 |
| }, |
| { |
| "epoch": 0.018661992264392347, |
| "grad_norm": 11.840386390686035, |
| "learning_rate": 1.9996113743583966e-07, |
| "loss": 1.0326, |
| "step": 20260 |
| }, |
| { |
| "epoch": 0.018671203514276057, |
| "grad_norm": 11.094808578491211, |
| "learning_rate": 1.9996109707225454e-07, |
| "loss": 1.0243, |
| "step": 20270 |
| }, |
| { |
| "epoch": 0.018680414764159763, |
| "grad_norm": 10.186753273010254, |
| "learning_rate": 1.999610566877231e-07, |
| "loss": 0.9799, |
| "step": 20280 |
| }, |
| { |
| "epoch": 0.018689626014043472, |
| "grad_norm": 11.063421249389648, |
| "learning_rate": 1.9996101628224533e-07, |
| "loss": 1.0239, |
| "step": 20290 |
| }, |
| { |
| "epoch": 0.018698837263927178, |
| "grad_norm": 9.710027694702148, |
| "learning_rate": 1.9996097585582125e-07, |
| "loss": 0.9708, |
| "step": 20300 |
| }, |
| { |
| "epoch": 0.018708048513810888, |
| "grad_norm": 11.756840705871582, |
| "learning_rate": 1.9996093540845086e-07, |
| "loss": 1.0278, |
| "step": 20310 |
| }, |
| { |
| "epoch": 0.018717259763694597, |
| "grad_norm": 10.109110832214355, |
| "learning_rate": 1.9996089494013416e-07, |
| "loss": 0.9563, |
| "step": 20320 |
| }, |
| { |
| "epoch": 0.018726471013578303, |
| "grad_norm": 9.724493026733398, |
| "learning_rate": 1.9996085445087116e-07, |
| "loss": 1.0114, |
| "step": 20330 |
| }, |
| { |
| "epoch": 0.018735682263462013, |
| "grad_norm": 9.896427154541016, |
| "learning_rate": 1.999608139406619e-07, |
| "loss": 0.9755, |
| "step": 20340 |
| }, |
| { |
| "epoch": 0.01874489351334572, |
| "grad_norm": 10.75046443939209, |
| "learning_rate": 1.9996077340950633e-07, |
| "loss": 1.0275, |
| "step": 20350 |
| }, |
| { |
| "epoch": 0.018754104763229428, |
| "grad_norm": 10.766581535339355, |
| "learning_rate": 1.9996073285740453e-07, |
| "loss": 1.0287, |
| "step": 20360 |
| }, |
| { |
| "epoch": 0.018763316013113134, |
| "grad_norm": 10.327228546142578, |
| "learning_rate": 1.9996069228435645e-07, |
| "loss": 1.0022, |
| "step": 20370 |
| }, |
| { |
| "epoch": 0.018772527262996844, |
| "grad_norm": 10.792991638183594, |
| "learning_rate": 1.9996065169036214e-07, |
| "loss": 1.0172, |
| "step": 20380 |
| }, |
| { |
| "epoch": 0.01878173851288055, |
| "grad_norm": 9.807416915893555, |
| "learning_rate": 1.9996061107542158e-07, |
| "loss": 1.0175, |
| "step": 20390 |
| }, |
| { |
| "epoch": 0.01879094976276426, |
| "grad_norm": 10.318675994873047, |
| "learning_rate": 1.999605704395348e-07, |
| "loss": 0.9841, |
| "step": 20400 |
| }, |
| { |
| "epoch": 0.01880016101264797, |
| "grad_norm": 10.774794578552246, |
| "learning_rate": 1.9996052978270176e-07, |
| "loss": 0.9796, |
| "step": 20410 |
| }, |
| { |
| "epoch": 0.018809372262531675, |
| "grad_norm": 10.019353866577148, |
| "learning_rate": 1.9996048910492253e-07, |
| "loss": 0.9473, |
| "step": 20420 |
| }, |
| { |
| "epoch": 0.018818583512415384, |
| "grad_norm": 10.962063789367676, |
| "learning_rate": 1.999604484061971e-07, |
| "loss": 0.9746, |
| "step": 20430 |
| }, |
| { |
| "epoch": 0.01882779476229909, |
| "grad_norm": 12.321850776672363, |
| "learning_rate": 1.9996040768652544e-07, |
| "loss": 0.983, |
| "step": 20440 |
| }, |
| { |
| "epoch": 0.0188370060121828, |
| "grad_norm": 10.552327156066895, |
| "learning_rate": 1.999603669459076e-07, |
| "loss": 1.0436, |
| "step": 20450 |
| }, |
| { |
| "epoch": 0.018846217262066506, |
| "grad_norm": 10.687848091125488, |
| "learning_rate": 1.999603261843436e-07, |
| "loss": 0.9399, |
| "step": 20460 |
| }, |
| { |
| "epoch": 0.018855428511950215, |
| "grad_norm": 9.804264068603516, |
| "learning_rate": 1.9996028540183344e-07, |
| "loss": 0.9881, |
| "step": 20470 |
| }, |
| { |
| "epoch": 0.01886463976183392, |
| "grad_norm": 9.700034141540527, |
| "learning_rate": 1.999602445983771e-07, |
| "loss": 0.9698, |
| "step": 20480 |
| }, |
| { |
| "epoch": 0.01887385101171763, |
| "grad_norm": 12.27613353729248, |
| "learning_rate": 1.9996020377397458e-07, |
| "loss": 0.9995, |
| "step": 20490 |
| }, |
| { |
| "epoch": 0.01888306226160134, |
| "grad_norm": 14.351017951965332, |
| "learning_rate": 1.9996016292862591e-07, |
| "loss": 1.0115, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.018892273511485046, |
| "grad_norm": 10.670650482177734, |
| "learning_rate": 1.9996012206233116e-07, |
| "loss": 0.9659, |
| "step": 20510 |
| }, |
| { |
| "epoch": 0.018901484761368756, |
| "grad_norm": 11.627557754516602, |
| "learning_rate": 1.9996008117509023e-07, |
| "loss": 0.9551, |
| "step": 20520 |
| }, |
| { |
| "epoch": 0.018910696011252462, |
| "grad_norm": 10.185769081115723, |
| "learning_rate": 1.999600402669032e-07, |
| "loss": 0.9963, |
| "step": 20530 |
| }, |
| { |
| "epoch": 0.01891990726113617, |
| "grad_norm": 10.86068344116211, |
| "learning_rate": 1.9995999933777003e-07, |
| "loss": 0.9694, |
| "step": 20540 |
| }, |
| { |
| "epoch": 0.018929118511019877, |
| "grad_norm": 11.024727821350098, |
| "learning_rate": 1.999599583876908e-07, |
| "loss": 0.9851, |
| "step": 20550 |
| }, |
| { |
| "epoch": 0.018938329760903587, |
| "grad_norm": 10.428853988647461, |
| "learning_rate": 1.9995991741666542e-07, |
| "loss": 0.977, |
| "step": 20560 |
| }, |
| { |
| "epoch": 0.018947541010787296, |
| "grad_norm": 10.000954627990723, |
| "learning_rate": 1.99959876424694e-07, |
| "loss": 1.0128, |
| "step": 20570 |
| }, |
| { |
| "epoch": 0.018956752260671002, |
| "grad_norm": 10.777172088623047, |
| "learning_rate": 1.9995983541177647e-07, |
| "loss": 1.0077, |
| "step": 20580 |
| }, |
| { |
| "epoch": 0.018965963510554712, |
| "grad_norm": 10.307262420654297, |
| "learning_rate": 1.999597943779129e-07, |
| "loss": 1.0021, |
| "step": 20590 |
| }, |
| { |
| "epoch": 0.018975174760438418, |
| "grad_norm": 10.650036811828613, |
| "learning_rate": 1.9995975332310326e-07, |
| "loss": 0.9929, |
| "step": 20600 |
| }, |
| { |
| "epoch": 0.018984386010322127, |
| "grad_norm": 10.3497953414917, |
| "learning_rate": 1.9995971224734756e-07, |
| "loss": 0.9734, |
| "step": 20610 |
| }, |
| { |
| "epoch": 0.018993597260205834, |
| "grad_norm": 10.647721290588379, |
| "learning_rate": 1.999596711506458e-07, |
| "loss": 1.0101, |
| "step": 20620 |
| }, |
| { |
| "epoch": 0.019002808510089543, |
| "grad_norm": 11.1587553024292, |
| "learning_rate": 1.9995963003299803e-07, |
| "loss": 1.0156, |
| "step": 20630 |
| }, |
| { |
| "epoch": 0.01901201975997325, |
| "grad_norm": 10.917261123657227, |
| "learning_rate": 1.999595888944042e-07, |
| "loss": 0.97, |
| "step": 20640 |
| }, |
| { |
| "epoch": 0.01902123100985696, |
| "grad_norm": 9.754059791564941, |
| "learning_rate": 1.999595477348644e-07, |
| "loss": 0.9617, |
| "step": 20650 |
| }, |
| { |
| "epoch": 0.019030442259740668, |
| "grad_norm": 12.903704643249512, |
| "learning_rate": 1.9995950655437857e-07, |
| "loss": 1.0124, |
| "step": 20660 |
| }, |
| { |
| "epoch": 0.019039653509624374, |
| "grad_norm": 10.363020896911621, |
| "learning_rate": 1.9995946535294673e-07, |
| "loss": 1.033, |
| "step": 20670 |
| }, |
| { |
| "epoch": 0.019048864759508084, |
| "grad_norm": 10.66309642791748, |
| "learning_rate": 1.999594241305689e-07, |
| "loss": 0.9854, |
| "step": 20680 |
| }, |
| { |
| "epoch": 0.01905807600939179, |
| "grad_norm": 11.438106536865234, |
| "learning_rate": 1.999593828872451e-07, |
| "loss": 0.9784, |
| "step": 20690 |
| }, |
| { |
| "epoch": 0.0190672872592755, |
| "grad_norm": 10.802545547485352, |
| "learning_rate": 1.9995934162297533e-07, |
| "loss": 0.9856, |
| "step": 20700 |
| }, |
| { |
| "epoch": 0.019076498509159205, |
| "grad_norm": 10.857063293457031, |
| "learning_rate": 1.9995930033775955e-07, |
| "loss": 0.975, |
| "step": 20710 |
| }, |
| { |
| "epoch": 0.019085709759042915, |
| "grad_norm": 8.290727615356445, |
| "learning_rate": 1.9995925903159786e-07, |
| "loss": 0.9805, |
| "step": 20720 |
| }, |
| { |
| "epoch": 0.01909492100892662, |
| "grad_norm": 9.859150886535645, |
| "learning_rate": 1.999592177044902e-07, |
| "loss": 1.0007, |
| "step": 20730 |
| }, |
| { |
| "epoch": 0.01910413225881033, |
| "grad_norm": 11.64376449584961, |
| "learning_rate": 1.999591763564366e-07, |
| "loss": 0.9639, |
| "step": 20740 |
| }, |
| { |
| "epoch": 0.01911334350869404, |
| "grad_norm": 10.379778861999512, |
| "learning_rate": 1.9995913498743706e-07, |
| "loss": 1.0089, |
| "step": 20750 |
| }, |
| { |
| "epoch": 0.019122554758577746, |
| "grad_norm": 10.350616455078125, |
| "learning_rate": 1.999590935974916e-07, |
| "loss": 1.0172, |
| "step": 20760 |
| }, |
| { |
| "epoch": 0.019131766008461455, |
| "grad_norm": 11.135503768920898, |
| "learning_rate": 1.9995905218660022e-07, |
| "loss": 1.0029, |
| "step": 20770 |
| }, |
| { |
| "epoch": 0.01914097725834516, |
| "grad_norm": 9.846102714538574, |
| "learning_rate": 1.9995901075476298e-07, |
| "loss": 0.9848, |
| "step": 20780 |
| }, |
| { |
| "epoch": 0.01915018850822887, |
| "grad_norm": 11.135740280151367, |
| "learning_rate": 1.999589693019798e-07, |
| "loss": 0.9648, |
| "step": 20790 |
| }, |
| { |
| "epoch": 0.019159399758112577, |
| "grad_norm": 9.764442443847656, |
| "learning_rate": 1.9995892782825076e-07, |
| "loss": 0.9829, |
| "step": 20800 |
| }, |
| { |
| "epoch": 0.019168611007996286, |
| "grad_norm": 12.054460525512695, |
| "learning_rate": 1.999588863335758e-07, |
| "loss": 0.9921, |
| "step": 20810 |
| }, |
| { |
| "epoch": 0.019177822257879992, |
| "grad_norm": 10.409029006958008, |
| "learning_rate": 1.99958844817955e-07, |
| "loss": 0.9808, |
| "step": 20820 |
| }, |
| { |
| "epoch": 0.019187033507763702, |
| "grad_norm": 10.523197174072266, |
| "learning_rate": 1.999588032813883e-07, |
| "loss": 0.9691, |
| "step": 20830 |
| }, |
| { |
| "epoch": 0.01919624475764741, |
| "grad_norm": 11.11384391784668, |
| "learning_rate": 1.999587617238758e-07, |
| "loss": 1.0213, |
| "step": 20840 |
| }, |
| { |
| "epoch": 0.019205456007531117, |
| "grad_norm": 9.169964790344238, |
| "learning_rate": 1.9995872014541744e-07, |
| "loss": 1.0096, |
| "step": 20850 |
| }, |
| { |
| "epoch": 0.019214667257414827, |
| "grad_norm": 9.933364868164062, |
| "learning_rate": 1.9995867854601323e-07, |
| "loss": 1.0092, |
| "step": 20860 |
| }, |
| { |
| "epoch": 0.019223878507298533, |
| "grad_norm": 9.901457786560059, |
| "learning_rate": 1.999586369256632e-07, |
| "loss": 1.0012, |
| "step": 20870 |
| }, |
| { |
| "epoch": 0.019233089757182242, |
| "grad_norm": 10.636425018310547, |
| "learning_rate": 1.9995859528436734e-07, |
| "loss": 1.0588, |
| "step": 20880 |
| }, |
| { |
| "epoch": 0.01924230100706595, |
| "grad_norm": 11.2418794631958, |
| "learning_rate": 1.9995855362212568e-07, |
| "loss": 0.9862, |
| "step": 20890 |
| }, |
| { |
| "epoch": 0.019251512256949658, |
| "grad_norm": 10.67471694946289, |
| "learning_rate": 1.9995851193893822e-07, |
| "loss": 0.974, |
| "step": 20900 |
| }, |
| { |
| "epoch": 0.019260723506833367, |
| "grad_norm": 10.407637596130371, |
| "learning_rate": 1.9995847023480498e-07, |
| "loss": 0.9643, |
| "step": 20910 |
| }, |
| { |
| "epoch": 0.019269934756717073, |
| "grad_norm": 10.72350025177002, |
| "learning_rate": 1.9995842850972594e-07, |
| "loss": 1.0547, |
| "step": 20920 |
| }, |
| { |
| "epoch": 0.019279146006600783, |
| "grad_norm": 9.38415813446045, |
| "learning_rate": 1.9995838676370112e-07, |
| "loss": 0.9694, |
| "step": 20930 |
| }, |
| { |
| "epoch": 0.01928835725648449, |
| "grad_norm": 10.669807434082031, |
| "learning_rate": 1.9995834499673054e-07, |
| "loss": 0.9881, |
| "step": 20940 |
| }, |
| { |
| "epoch": 0.0192975685063682, |
| "grad_norm": 14.225835800170898, |
| "learning_rate": 1.999583032088142e-07, |
| "loss": 0.9752, |
| "step": 20950 |
| }, |
| { |
| "epoch": 0.019306779756251904, |
| "grad_norm": 37.22139358520508, |
| "learning_rate": 1.999582613999521e-07, |
| "loss": 1.009, |
| "step": 20960 |
| }, |
| { |
| "epoch": 0.019315991006135614, |
| "grad_norm": 9.882749557495117, |
| "learning_rate": 1.9995821957014428e-07, |
| "loss": 0.9913, |
| "step": 20970 |
| }, |
| { |
| "epoch": 0.01932520225601932, |
| "grad_norm": 10.034158706665039, |
| "learning_rate": 1.9995817771939075e-07, |
| "loss": 1.0146, |
| "step": 20980 |
| }, |
| { |
| "epoch": 0.01933441350590303, |
| "grad_norm": 11.496566772460938, |
| "learning_rate": 1.9995813584769145e-07, |
| "loss": 0.9691, |
| "step": 20990 |
| }, |
| { |
| "epoch": 0.01934362475578674, |
| "grad_norm": 20.696666717529297, |
| "learning_rate": 1.9995809395504645e-07, |
| "loss": 0.9579, |
| "step": 21000 |
| } |
| ], |
| "logging_steps": 10, |
| "max_steps": 2171258, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 3000, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 0.0, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |
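The object above is a `trainer_state.json` checkpoint state in the layout written by the Hugging Face `transformers` Trainer: a `log_history` entry every `logging_steps` = 10 optimizer steps, a checkpoint every `save_steps` = 3000 steps, and `max_steps` = 2171258 across `num_train_epochs` = 2. As a minimal sketch of how such a state can be inspected, the snippet below loads the file and summarizes the loss trajectory; the file path and the smoothing window are illustrative assumptions, not values taken from the state itself.

```python
import json

# Assumed path: trainer_state.json sits inside each checkpoint-* directory
# written by transformers.Trainer (here, one every save_steps = 3000 steps).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only training-loss entries (eval entries, if any, lack a "loss" key).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

# Moving average over the per-10-step loss points. The window of 50 entries
# (i.e. 500 optimizer steps) is an illustrative choice, not from the file.
window = 50
smoothed = []
for i in range(len(losses)):
    lo = max(0, i - window + 1)
    smoothed.append(sum(losses[lo : i + 1]) / (i + 1 - lo))

print(f"logged points: {len(steps)} (step {steps[0]} .. step {steps[-1]})")
print(f"last raw loss: {losses[-1]:.4f}, last smoothed loss: {smoothed[-1]:.4f}")
```

In the window shown here (steps 20320 to 21000) the raw loss oscillates between roughly 0.94 and 1.06 from one logging point to the next, which is the noise the moving average is meant to flatten out.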