| { |
| "best_metric": 0.16133035331418843, |
| "best_model_checkpoint": "./distilhubert-phoneme/checkpoint-39226", |
| "epoch": 21.99957953749124, |
| "global_step": 39226, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.01, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 10.2401, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.01, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 9.8611, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 9e-07, |
| "loss": 10.1736, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.02, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 9.7808, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.5e-06, |
| "loss": 10.3918, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.03, |
| "learning_rate": 1.8e-06, |
| "loss": 9.6247, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 9.8085, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.04, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 9.5755, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.05, |
| "learning_rate": 2.7e-06, |
| "loss": 9.4187, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3e-06, |
| "loss": 9.9386, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.06, |
| "learning_rate": 3.3e-06, |
| "loss": 8.9904, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.6e-06, |
| "loss": 9.0428, |
| "step": 120 |
| }, |
| { |
| "epoch": 0.07, |
| "learning_rate": 3.9e-06, |
| "loss": 8.4537, |
| "step": 130 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 8.3071, |
| "step": 140 |
| }, |
| { |
| "epoch": 0.08, |
| "learning_rate": 4.5e-06, |
| "loss": 7.6865, |
| "step": 150 |
| }, |
| { |
| "epoch": 0.09, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 7.0795, |
| "step": 160 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 5.070000000000001e-06, |
| "loss": 6.8467, |
| "step": 170 |
| }, |
| { |
| "epoch": 0.1, |
| "learning_rate": 5.37e-06, |
| "loss": 6.2833, |
| "step": 180 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 5.67e-06, |
| "loss": 5.9109, |
| "step": 190 |
| }, |
| { |
| "epoch": 0.11, |
| "learning_rate": 5.9700000000000004e-06, |
| "loss": 5.7842, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 6.27e-06, |
| "loss": 5.1374, |
| "step": 210 |
| }, |
| { |
| "epoch": 0.12, |
| "learning_rate": 6.57e-06, |
| "loss": 4.7952, |
| "step": 220 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 6.87e-06, |
| "loss": 4.5664, |
| "step": 230 |
| }, |
| { |
| "epoch": 0.13, |
| "learning_rate": 7.17e-06, |
| "loss": 4.3286, |
| "step": 240 |
| }, |
| { |
| "epoch": 0.14, |
| "learning_rate": 7.4700000000000005e-06, |
| "loss": 4.3067, |
| "step": 250 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 7.77e-06, |
| "loss": 4.0605, |
| "step": 260 |
| }, |
| { |
| "epoch": 0.15, |
| "learning_rate": 8.07e-06, |
| "loss": 3.864, |
| "step": 270 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 8.370000000000001e-06, |
| "loss": 3.7531, |
| "step": 280 |
| }, |
| { |
| "epoch": 0.16, |
| "learning_rate": 8.67e-06, |
| "loss": 3.649, |
| "step": 290 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 8.97e-06, |
| "loss": 3.6974, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.17, |
| "learning_rate": 9.27e-06, |
| "loss": 3.4911, |
| "step": 310 |
| }, |
| { |
| "epoch": 0.18, |
| "learning_rate": 9.57e-06, |
| "loss": 3.4349, |
| "step": 320 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 9.87e-06, |
| "loss": 3.3519, |
| "step": 330 |
| }, |
| { |
| "epoch": 0.19, |
| "learning_rate": 1.0170000000000001e-05, |
| "loss": 3.2832, |
| "step": 340 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.047e-05, |
| "loss": 3.2937, |
| "step": 350 |
| }, |
| { |
| "epoch": 0.2, |
| "learning_rate": 1.077e-05, |
| "loss": 3.2283, |
| "step": 360 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.107e-05, |
| "loss": 3.1493, |
| "step": 370 |
| }, |
| { |
| "epoch": 0.21, |
| "learning_rate": 1.137e-05, |
| "loss": 3.1129, |
| "step": 380 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.167e-05, |
| "loss": 3.0546, |
| "step": 390 |
| }, |
| { |
| "epoch": 0.22, |
| "learning_rate": 1.197e-05, |
| "loss": 3.075, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.23, |
| "learning_rate": 1.227e-05, |
| "loss": 2.983, |
| "step": 410 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.257e-05, |
| "loss": 2.9635, |
| "step": 420 |
| }, |
| { |
| "epoch": 0.24, |
| "learning_rate": 1.287e-05, |
| "loss": 2.9188, |
| "step": 430 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.3170000000000001e-05, |
| "loss": 2.8841, |
| "step": 440 |
| }, |
| { |
| "epoch": 0.25, |
| "learning_rate": 1.3470000000000001e-05, |
| "loss": 2.9156, |
| "step": 450 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.377e-05, |
| "loss": 2.8061, |
| "step": 460 |
| }, |
| { |
| "epoch": 0.26, |
| "learning_rate": 1.4069999999999999e-05, |
| "loss": 2.7802, |
| "step": 470 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.437e-05, |
| "loss": 2.7335, |
| "step": 480 |
| }, |
| { |
| "epoch": 0.27, |
| "learning_rate": 1.467e-05, |
| "loss": 2.738, |
| "step": 490 |
| }, |
| { |
| "epoch": 0.28, |
| "learning_rate": 1.497e-05, |
| "loss": 2.7594, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.527e-05, |
| "loss": 2.6636, |
| "step": 510 |
| }, |
| { |
| "epoch": 0.29, |
| "learning_rate": 1.5570000000000002e-05, |
| "loss": 2.6438, |
| "step": 520 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.5870000000000002e-05, |
| "loss": 2.6305, |
| "step": 530 |
| }, |
| { |
| "epoch": 0.3, |
| "learning_rate": 1.6170000000000003e-05, |
| "loss": 2.608, |
| "step": 540 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.6470000000000003e-05, |
| "loss": 2.6374, |
| "step": 550 |
| }, |
| { |
| "epoch": 0.31, |
| "learning_rate": 1.677e-05, |
| "loss": 2.5592, |
| "step": 560 |
| }, |
| { |
| "epoch": 0.32, |
| "learning_rate": 1.7069999999999998e-05, |
| "loss": 2.5626, |
| "step": 570 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.7369999999999998e-05, |
| "loss": 2.5306, |
| "step": 580 |
| }, |
| { |
| "epoch": 0.33, |
| "learning_rate": 1.767e-05, |
| "loss": 2.5255, |
| "step": 590 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.797e-05, |
| "loss": 2.5519, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.34, |
| "learning_rate": 1.827e-05, |
| "loss": 2.4987, |
| "step": 610 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.857e-05, |
| "loss": 2.4991, |
| "step": 620 |
| }, |
| { |
| "epoch": 0.35, |
| "learning_rate": 1.887e-05, |
| "loss": 2.4842, |
| "step": 630 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.917e-05, |
| "loss": 2.4732, |
| "step": 640 |
| }, |
| { |
| "epoch": 0.36, |
| "learning_rate": 1.947e-05, |
| "loss": 2.5251, |
| "step": 650 |
| }, |
| { |
| "epoch": 0.37, |
| "learning_rate": 1.9770000000000002e-05, |
| "loss": 2.4668, |
| "step": 660 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.0070000000000003e-05, |
| "loss": 2.4548, |
| "step": 670 |
| }, |
| { |
| "epoch": 0.38, |
| "learning_rate": 2.0370000000000003e-05, |
| "loss": 2.4548, |
| "step": 680 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.067e-05, |
| "loss": 2.4671, |
| "step": 690 |
| }, |
| { |
| "epoch": 0.39, |
| "learning_rate": 2.097e-05, |
| "loss": 2.5052, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.1269999999999998e-05, |
| "loss": 2.4331, |
| "step": 710 |
| }, |
| { |
| "epoch": 0.4, |
| "learning_rate": 2.157e-05, |
| "loss": 2.4412, |
| "step": 720 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.187e-05, |
| "loss": 2.445, |
| "step": 730 |
| }, |
| { |
| "epoch": 0.41, |
| "learning_rate": 2.217e-05, |
| "loss": 2.4363, |
| "step": 740 |
| }, |
| { |
| "epoch": 0.42, |
| "learning_rate": 2.247e-05, |
| "loss": 2.4654, |
| "step": 750 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.277e-05, |
| "loss": 2.4304, |
| "step": 760 |
| }, |
| { |
| "epoch": 0.43, |
| "learning_rate": 2.307e-05, |
| "loss": 2.4183, |
| "step": 770 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.337e-05, |
| "loss": 2.4133, |
| "step": 780 |
| }, |
| { |
| "epoch": 0.44, |
| "learning_rate": 2.3670000000000002e-05, |
| "loss": 2.4157, |
| "step": 790 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.3970000000000003e-05, |
| "loss": 2.388, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.45, |
| "learning_rate": 2.4270000000000003e-05, |
| "loss": 2.1665, |
| "step": 810 |
| }, |
| { |
| "epoch": 0.46, |
| "learning_rate": 2.457e-05, |
| "loss": 2.073, |
| "step": 820 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.487e-05, |
| "loss": 2.0212, |
| "step": 830 |
| }, |
| { |
| "epoch": 0.47, |
| "learning_rate": 2.517e-05, |
| "loss": 2.0148, |
| "step": 840 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.547e-05, |
| "loss": 2.0511, |
| "step": 850 |
| }, |
| { |
| "epoch": 0.48, |
| "learning_rate": 2.577e-05, |
| "loss": 1.9919, |
| "step": 860 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.607e-05, |
| "loss": 1.9913, |
| "step": 870 |
| }, |
| { |
| "epoch": 0.49, |
| "learning_rate": 2.637e-05, |
| "loss": 1.9962, |
| "step": 880 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.667e-05, |
| "loss": 1.9867, |
| "step": 890 |
| }, |
| { |
| "epoch": 0.5, |
| "learning_rate": 2.697e-05, |
| "loss": 2.0495, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.51, |
| "learning_rate": 2.727e-05, |
| "loss": 1.9769, |
| "step": 910 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.7570000000000002e-05, |
| "loss": 1.9789, |
| "step": 920 |
| }, |
| { |
| "epoch": 0.52, |
| "learning_rate": 2.7870000000000003e-05, |
| "loss": 1.9749, |
| "step": 930 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.817e-05, |
| "loss": 1.9828, |
| "step": 940 |
| }, |
| { |
| "epoch": 0.53, |
| "learning_rate": 2.847e-05, |
| "loss": 2.0373, |
| "step": 950 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.877e-05, |
| "loss": 1.9715, |
| "step": 960 |
| }, |
| { |
| "epoch": 0.54, |
| "learning_rate": 2.907e-05, |
| "loss": 1.9757, |
| "step": 970 |
| }, |
| { |
| "epoch": 0.55, |
| "learning_rate": 2.9370000000000002e-05, |
| "loss": 1.9664, |
| "step": 980 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.967e-05, |
| "loss": 1.9755, |
| "step": 990 |
| }, |
| { |
| "epoch": 0.56, |
| "learning_rate": 2.997e-05, |
| "loss": 2.0059, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.9997452349499906e-05, |
| "loss": 1.9672, |
| "step": 1010 |
| }, |
| { |
| "epoch": 0.57, |
| "learning_rate": 2.9994621626722026e-05, |
| "loss": 1.9579, |
| "step": 1020 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.999179090394414e-05, |
| "loss": 1.9578, |
| "step": 1030 |
| }, |
| { |
| "epoch": 0.58, |
| "learning_rate": 2.9988960181166257e-05, |
| "loss": 1.9669, |
| "step": 1040 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 2.9986129458388376e-05, |
| "loss": 2.0004, |
| "step": 1050 |
| }, |
| { |
| "epoch": 0.59, |
| "learning_rate": 2.9983298735610496e-05, |
| "loss": 1.9591, |
| "step": 1060 |
| }, |
| { |
| "epoch": 0.6, |
| "learning_rate": 2.998046801283261e-05, |
| "loss": 1.955, |
| "step": 1070 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 2.9977637290054727e-05, |
| "loss": 1.9562, |
| "step": 1080 |
| }, |
| { |
| "epoch": 0.61, |
| "learning_rate": 2.9974806567276843e-05, |
| "loss": 1.9581, |
| "step": 1090 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 2.9971975844498965e-05, |
| "loss": 1.9903, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.62, |
| "learning_rate": 2.996914512172108e-05, |
| "loss": 1.9504, |
| "step": 1110 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 2.9966314398943197e-05, |
| "loss": 1.9529, |
| "step": 1120 |
| }, |
| { |
| "epoch": 0.63, |
| "learning_rate": 2.9963483676165313e-05, |
| "loss": 1.9524, |
| "step": 1130 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 2.9960652953387432e-05, |
| "loss": 1.9605, |
| "step": 1140 |
| }, |
| { |
| "epoch": 0.64, |
| "learning_rate": 2.995782223060955e-05, |
| "loss": 2.0009, |
| "step": 1150 |
| }, |
| { |
| "epoch": 0.65, |
| "learning_rate": 2.9954991507831667e-05, |
| "loss": 1.9491, |
| "step": 1160 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 2.9952160785053783e-05, |
| "loss": 1.9479, |
| "step": 1170 |
| }, |
| { |
| "epoch": 0.66, |
| "learning_rate": 2.99493300622759e-05, |
| "loss": 1.9563, |
| "step": 1180 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 2.994649933949802e-05, |
| "loss": 1.956, |
| "step": 1190 |
| }, |
| { |
| "epoch": 0.67, |
| "learning_rate": 2.9943668616720137e-05, |
| "loss": 1.9744, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 2.9940837893942253e-05, |
| "loss": 1.9454, |
| "step": 1210 |
| }, |
| { |
| "epoch": 0.68, |
| "learning_rate": 2.993800717116437e-05, |
| "loss": 1.9481, |
| "step": 1220 |
| }, |
| { |
| "epoch": 0.69, |
| "learning_rate": 2.993517644838649e-05, |
| "loss": 1.9459, |
| "step": 1230 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 2.9932345725608607e-05, |
| "loss": 1.9461, |
| "step": 1240 |
| }, |
| { |
| "epoch": 0.7, |
| "learning_rate": 2.9929515002830723e-05, |
| "loss": 1.9778, |
| "step": 1250 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 2.992668428005284e-05, |
| "loss": 1.9405, |
| "step": 1260 |
| }, |
| { |
| "epoch": 0.71, |
| "learning_rate": 2.9923853557274958e-05, |
| "loss": 1.9456, |
| "step": 1270 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 2.9921022834497077e-05, |
| "loss": 1.9474, |
| "step": 1280 |
| }, |
| { |
| "epoch": 0.72, |
| "learning_rate": 2.9918192111719193e-05, |
| "loss": 1.9511, |
| "step": 1290 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 2.991536138894131e-05, |
| "loss": 1.9714, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.73, |
| "learning_rate": 2.9912530666163428e-05, |
| "loss": 1.9468, |
| "step": 1310 |
| }, |
| { |
| "epoch": 0.74, |
| "learning_rate": 2.9909699943385547e-05, |
| "loss": 1.9504, |
| "step": 1320 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 2.9906869220607663e-05, |
| "loss": 1.9436, |
| "step": 1330 |
| }, |
| { |
| "epoch": 0.75, |
| "learning_rate": 2.990403849782978e-05, |
| "loss": 1.9453, |
| "step": 1340 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.9901207775051898e-05, |
| "loss": 1.9684, |
| "step": 1350 |
| }, |
| { |
| "epoch": 0.76, |
| "learning_rate": 2.9898377052274014e-05, |
| "loss": 1.9379, |
| "step": 1360 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.9895546329496133e-05, |
| "loss": 1.9398, |
| "step": 1370 |
| }, |
| { |
| "epoch": 0.77, |
| "learning_rate": 2.989271560671825e-05, |
| "loss": 1.9374, |
| "step": 1380 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.9889884883940368e-05, |
| "loss": 1.9391, |
| "step": 1390 |
| }, |
| { |
| "epoch": 0.78, |
| "learning_rate": 2.9887054161162484e-05, |
| "loss": 1.9679, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.79, |
| "learning_rate": 2.9884223438384603e-05, |
| "loss": 1.9384, |
| "step": 1410 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.988139271560672e-05, |
| "loss": 1.9436, |
| "step": 1420 |
| }, |
| { |
| "epoch": 0.8, |
| "learning_rate": 2.9878561992828838e-05, |
| "loss": 1.9355, |
| "step": 1430 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.9875731270050954e-05, |
| "loss": 1.9476, |
| "step": 1440 |
| }, |
| { |
| "epoch": 0.81, |
| "learning_rate": 2.987290054727307e-05, |
| "loss": 1.9595, |
| "step": 1450 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.987006982449519e-05, |
| "loss": 1.9384, |
| "step": 1460 |
| }, |
| { |
| "epoch": 0.82, |
| "learning_rate": 2.9867239101717308e-05, |
| "loss": 1.9295, |
| "step": 1470 |
| }, |
| { |
| "epoch": 0.83, |
| "learning_rate": 2.9864408378939423e-05, |
| "loss": 1.9335, |
| "step": 1480 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.986157765616154e-05, |
| "loss": 1.9318, |
| "step": 1490 |
| }, |
| { |
| "epoch": 0.84, |
| "learning_rate": 2.985874693338366e-05, |
| "loss": 1.9632, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9855916210605778e-05, |
| "loss": 1.9302, |
| "step": 1510 |
| }, |
| { |
| "epoch": 0.85, |
| "learning_rate": 2.9853085487827893e-05, |
| "loss": 1.9235, |
| "step": 1520 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.985025476505001e-05, |
| "loss": 1.9272, |
| "step": 1530 |
| }, |
| { |
| "epoch": 0.86, |
| "learning_rate": 2.9847424042272125e-05, |
| "loss": 1.9228, |
| "step": 1540 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.9844593319494244e-05, |
| "loss": 1.9447, |
| "step": 1550 |
| }, |
| { |
| "epoch": 0.87, |
| "learning_rate": 2.9841762596716363e-05, |
| "loss": 1.9216, |
| "step": 1560 |
| }, |
| { |
| "epoch": 0.88, |
| "learning_rate": 2.983893187393848e-05, |
| "loss": 1.9131, |
| "step": 1570 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 2.9836101151160595e-05, |
| "loss": 1.9066, |
| "step": 1580 |
| }, |
| { |
| "epoch": 0.89, |
| "learning_rate": 2.9833270428382714e-05, |
| "loss": 1.9097, |
| "step": 1590 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 2.9830439705604833e-05, |
| "loss": 1.9269, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.9, |
| "learning_rate": 2.982760898282695e-05, |
| "loss": 1.888, |
| "step": 1610 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 2.9824778260049065e-05, |
| "loss": 1.8834, |
| "step": 1620 |
| }, |
| { |
| "epoch": 0.91, |
| "learning_rate": 2.982194753727118e-05, |
| "loss": 1.8627, |
| "step": 1630 |
| }, |
| { |
| "epoch": 0.92, |
| "learning_rate": 2.9819116814493303e-05, |
| "loss": 1.8651, |
| "step": 1640 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.981628609171542e-05, |
| "loss": 1.878, |
| "step": 1650 |
| }, |
| { |
| "epoch": 0.93, |
| "learning_rate": 2.9813455368937535e-05, |
| "loss": 1.8341, |
| "step": 1660 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 2.981062464615965e-05, |
| "loss": 1.8153, |
| "step": 1670 |
| }, |
| { |
| "epoch": 0.94, |
| "learning_rate": 2.9807793923381773e-05, |
| "loss": 1.8005, |
| "step": 1680 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.980496320060389e-05, |
| "loss": 1.7834, |
| "step": 1690 |
| }, |
| { |
| "epoch": 0.95, |
| "learning_rate": 2.9802132477826005e-05, |
| "loss": 1.7874, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.979930175504812e-05, |
| "loss": 1.73, |
| "step": 1710 |
| }, |
| { |
| "epoch": 0.96, |
| "learning_rate": 2.979647103227024e-05, |
| "loss": 1.7015, |
| "step": 1720 |
| }, |
| { |
| "epoch": 0.97, |
| "learning_rate": 2.979364030949236e-05, |
| "loss": 1.6747, |
| "step": 1730 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.9790809586714475e-05, |
| "loss": 1.6477, |
| "step": 1740 |
| }, |
| { |
| "epoch": 0.98, |
| "learning_rate": 2.978797886393659e-05, |
| "loss": 1.6574, |
| "step": 1750 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.978514814115871e-05, |
| "loss": 1.5797, |
| "step": 1760 |
| }, |
| { |
| "epoch": 0.99, |
| "learning_rate": 2.978231741838083e-05, |
| "loss": 1.5511, |
| "step": 1770 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.9779486695602945e-05, |
| "loss": 1.5349, |
| "step": 1780 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_cer": 0.7836825388575067, |
| "eval_loss": 1.5499863624572754, |
| "eval_runtime": 311.0664, |
| "eval_samples_per_second": 17.112, |
| "eval_steps_per_second": 4.279, |
| "eval_wer": 0.8028005710514762, |
| "step": 1783 |
| }, |
| { |
| "epoch": 1.0, |
| "learning_rate": 2.977665597282506e-05, |
| "loss": 1.6386, |
| "step": 1790 |
| }, |
| { |
| "epoch": 1.01, |
| "learning_rate": 2.977382525004718e-05, |
| "loss": 1.462, |
| "step": 1800 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9770994527269296e-05, |
| "loss": 1.4318, |
| "step": 1810 |
| }, |
| { |
| "epoch": 1.02, |
| "learning_rate": 2.9768163804491415e-05, |
| "loss": 1.4208, |
| "step": 1820 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 2.976533308171353e-05, |
| "loss": 1.4202, |
| "step": 1830 |
| }, |
| { |
| "epoch": 1.03, |
| "learning_rate": 2.976250235893565e-05, |
| "loss": 1.4, |
| "step": 1840 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 2.9759671636157766e-05, |
| "loss": 1.3449, |
| "step": 1850 |
| }, |
| { |
| "epoch": 1.04, |
| "learning_rate": 2.9756840913379885e-05, |
| "loss": 1.3353, |
| "step": 1860 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 2.9754010190602e-05, |
| "loss": 1.3128, |
| "step": 1870 |
| }, |
| { |
| "epoch": 1.05, |
| "learning_rate": 2.975117946782412e-05, |
| "loss": 1.3059, |
| "step": 1880 |
| }, |
| { |
| "epoch": 1.06, |
| "learning_rate": 2.9748348745046236e-05, |
| "loss": 1.3185, |
| "step": 1890 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 2.974551802226835e-05, |
| "loss": 1.2632, |
| "step": 1900 |
| }, |
| { |
| "epoch": 1.07, |
| "learning_rate": 2.974268729949047e-05, |
| "loss": 1.2417, |
| "step": 1910 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 2.973985657671259e-05, |
| "loss": 1.2196, |
| "step": 1920 |
| }, |
| { |
| "epoch": 1.08, |
| "learning_rate": 2.9737025853934706e-05, |
| "loss": 1.2471, |
| "step": 1930 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 2.973419513115682e-05, |
| "loss": 1.2547, |
| "step": 1940 |
| }, |
| { |
| "epoch": 1.09, |
| "learning_rate": 2.973136440837894e-05, |
| "loss": 1.2012, |
| "step": 1950 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 2.972853368560106e-05, |
| "loss": 1.1678, |
| "step": 1960 |
| }, |
| { |
| "epoch": 1.1, |
| "learning_rate": 2.9725702962823176e-05, |
| "loss": 1.1661, |
| "step": 1970 |
| }, |
| { |
| "epoch": 1.11, |
| "learning_rate": 2.972287224004529e-05, |
| "loss": 1.1919, |
| "step": 1980 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 2.9720041517267407e-05, |
| "loss": 1.1897, |
| "step": 1990 |
| }, |
| { |
| "epoch": 1.12, |
| "learning_rate": 2.9717210794489526e-05, |
| "loss": 1.1455, |
| "step": 2000 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 2.9714380071711646e-05, |
| "loss": 1.1168, |
| "step": 2010 |
| }, |
| { |
| "epoch": 1.13, |
| "learning_rate": 2.971154934893376e-05, |
| "loss": 1.1172, |
| "step": 2020 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 2.9708718626155877e-05, |
| "loss": 1.1446, |
| "step": 2030 |
| }, |
| { |
| "epoch": 1.14, |
| "learning_rate": 2.9705887903377996e-05, |
| "loss": 1.1433, |
| "step": 2040 |
| }, |
| { |
| "epoch": 1.15, |
| "learning_rate": 2.9703057180600116e-05, |
| "loss": 1.0853, |
| "step": 2050 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 2.970022645782223e-05, |
| "loss": 1.0843, |
| "step": 2060 |
| }, |
| { |
| "epoch": 1.16, |
| "learning_rate": 2.9697395735044347e-05, |
| "loss": 1.0865, |
| "step": 2070 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 2.9694565012266466e-05, |
| "loss": 1.1062, |
| "step": 2080 |
| }, |
| { |
| "epoch": 1.17, |
| "learning_rate": 2.9691734289488586e-05, |
| "loss": 1.1018, |
| "step": 2090 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 2.96889035667107e-05, |
| "loss": 1.0441, |
| "step": 2100 |
| }, |
| { |
| "epoch": 1.18, |
| "learning_rate": 2.9686072843932817e-05, |
| "loss": 1.0395, |
| "step": 2110 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 2.9683242121154933e-05, |
| "loss": 1.0339, |
| "step": 2120 |
| }, |
| { |
| "epoch": 1.19, |
| "learning_rate": 2.9680411398377056e-05, |
| "loss": 1.0626, |
| "step": 2130 |
| }, |
| { |
| "epoch": 1.2, |
| "learning_rate": 2.967758067559917e-05, |
| "loss": 1.0766, |
| "step": 2140 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 2.9674749952821287e-05, |
| "loss": 1.0127, |
| "step": 2150 |
| }, |
| { |
| "epoch": 1.21, |
| "learning_rate": 2.9671919230043403e-05, |
| "loss": 0.9921, |
| "step": 2160 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 2.9669088507265522e-05, |
| "loss": 1.0152, |
| "step": 2170 |
| }, |
| { |
| "epoch": 1.22, |
| "learning_rate": 2.966625778448764e-05, |
| "loss": 1.0436, |
| "step": 2180 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 2.9663427061709757e-05, |
| "loss": 1.0452, |
| "step": 2190 |
| }, |
| { |
| "epoch": 1.23, |
| "learning_rate": 2.9660596338931873e-05, |
| "loss": 0.9815, |
| "step": 2200 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 2.9657765616153992e-05, |
| "loss": 0.9646, |
| "step": 2210 |
| }, |
| { |
| "epoch": 1.24, |
| "learning_rate": 2.965493489337611e-05, |
| "loss": 0.9683, |
| "step": 2220 |
| }, |
| { |
| "epoch": 1.25, |
| "learning_rate": 2.9652104170598227e-05, |
| "loss": 1.0026, |
| "step": 2230 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 2.9649273447820343e-05, |
| "loss": 0.9852, |
| "step": 2240 |
| }, |
| { |
| "epoch": 1.26, |
| "learning_rate": 2.9646442725042462e-05, |
| "loss": 0.9479, |
| "step": 2250 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 2.9643612002264578e-05, |
| "loss": 0.944, |
| "step": 2260 |
| }, |
| { |
| "epoch": 1.27, |
| "learning_rate": 2.9640781279486697e-05, |
| "loss": 0.9498, |
| "step": 2270 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 2.9637950556708813e-05, |
| "loss": 0.9987, |
| "step": 2280 |
| }, |
| { |
| "epoch": 1.28, |
| "learning_rate": 2.9635119833930932e-05, |
| "loss": 0.9779, |
| "step": 2290 |
| }, |
| { |
| "epoch": 1.29, |
| "learning_rate": 2.9632289111153048e-05, |
| "loss": 0.9283, |
| "step": 2300 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 2.9629458388375167e-05, |
| "loss": 0.928, |
| "step": 2310 |
| }, |
| { |
| "epoch": 1.3, |
| "learning_rate": 2.9626627665597283e-05, |
| "loss": 0.9317, |
| "step": 2320 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 2.9623796942819402e-05, |
| "loss": 0.9934, |
| "step": 2330 |
| }, |
| { |
| "epoch": 1.31, |
| "learning_rate": 2.9620966220041518e-05, |
| "loss": 0.9616, |
| "step": 2340 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 2.9618135497263637e-05, |
| "loss": 0.9217, |
| "step": 2350 |
| }, |
| { |
| "epoch": 1.32, |
| "learning_rate": 2.9615304774485753e-05, |
| "loss": 0.9211, |
| "step": 2360 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 2.9612474051707872e-05, |
| "loss": 0.8921, |
| "step": 2370 |
| }, |
| { |
| "epoch": 1.33, |
| "learning_rate": 2.9609643328929988e-05, |
| "loss": 0.9404, |
| "step": 2380 |
| }, |
| { |
| "epoch": 1.34, |
| "learning_rate": 2.9606812606152104e-05, |
| "loss": 0.9349, |
| "step": 2390 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 2.9603981883374223e-05, |
| "loss": 0.8915, |
| "step": 2400 |
| }, |
| { |
| "epoch": 1.35, |
| "learning_rate": 2.960115116059634e-05, |
| "loss": 0.8871, |
| "step": 2410 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 2.9598320437818458e-05, |
| "loss": 0.9019, |
| "step": 2420 |
| }, |
| { |
| "epoch": 1.36, |
| "learning_rate": 2.9595489715040574e-05, |
| "loss": 0.928, |
| "step": 2430 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 2.9592658992262693e-05, |
| "loss": 0.9319, |
| "step": 2440 |
| }, |
| { |
| "epoch": 1.37, |
| "learning_rate": 2.958982826948481e-05, |
| "loss": 0.8852, |
| "step": 2450 |
| }, |
| { |
| "epoch": 1.38, |
| "learning_rate": 2.9586997546706928e-05, |
| "loss": 0.8825, |
| "step": 2460 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 2.9584166823929044e-05, |
| "loss": 0.8626, |
| "step": 2470 |
| }, |
| { |
| "epoch": 1.39, |
| "learning_rate": 2.958133610115116e-05, |
| "loss": 0.9104, |
| "step": 2480 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 2.957850537837328e-05, |
| "loss": 0.8975, |
| "step": 2490 |
| }, |
| { |
| "epoch": 1.4, |
| "learning_rate": 2.9575674655595398e-05, |
| "loss": 0.8528, |
| "step": 2500 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 2.9572843932817514e-05, |
| "loss": 0.8799, |
| "step": 2510 |
| }, |
| { |
| "epoch": 1.41, |
| "learning_rate": 2.957001321003963e-05, |
| "loss": 0.8761, |
| "step": 2520 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 2.956718248726175e-05, |
| "loss": 0.9048, |
| "step": 2530 |
| }, |
| { |
| "epoch": 1.42, |
| "learning_rate": 2.9564351764483868e-05, |
| "loss": 0.9104, |
| "step": 2540 |
| }, |
| { |
| "epoch": 1.43, |
| "learning_rate": 2.9561521041705984e-05, |
| "loss": 0.8377, |
| "step": 2550 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 2.95586903189281e-05, |
| "loss": 0.8317, |
| "step": 2560 |
| }, |
| { |
| "epoch": 1.44, |
| "learning_rate": 2.9555859596150215e-05, |
| "loss": 0.8454, |
| "step": 2570 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 2.9553028873372338e-05, |
| "loss": 0.9074, |
| "step": 2580 |
| }, |
| { |
| "epoch": 1.45, |
| "learning_rate": 2.9550198150594454e-05, |
| "loss": 0.8922, |
| "step": 2590 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 2.954736742781657e-05, |
| "loss": 0.8348, |
| "step": 2600 |
| }, |
| { |
| "epoch": 1.46, |
| "learning_rate": 2.9544536705038685e-05, |
| "loss": 0.8326, |
| "step": 2610 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 2.9541989054538594e-05, |
| "loss": 0.813, |
| "step": 2620 |
| }, |
| { |
| "epoch": 1.47, |
| "learning_rate": 2.953915833176071e-05, |
| "loss": 0.8655, |
| "step": 2630 |
| }, |
| { |
| "epoch": 1.48, |
| "learning_rate": 2.9536327608982826e-05, |
| "loss": 0.8846, |
| "step": 2640 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 2.9533496886204945e-05, |
| "loss": 0.8351, |
| "step": 2650 |
| }, |
| { |
| "epoch": 1.49, |
| "learning_rate": 2.9530666163427064e-05, |
| "loss": 0.8114, |
| "step": 2660 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 2.952783544064918e-05, |
| "loss": 0.8146, |
| "step": 2670 |
| }, |
| { |
| "epoch": 1.5, |
| "learning_rate": 2.9525004717871296e-05, |
| "loss": 0.8629, |
| "step": 2680 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 2.9522173995093415e-05, |
| "loss": 0.8638, |
| "step": 2690 |
| }, |
| { |
| "epoch": 1.51, |
| "learning_rate": 2.9519343272315534e-05, |
| "loss": 0.8153, |
| "step": 2700 |
| }, |
| { |
| "epoch": 1.52, |
| "learning_rate": 2.951651254953765e-05, |
| "loss": 0.8182, |
| "step": 2710 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 2.9513681826759766e-05, |
| "loss": 0.8315, |
| "step": 2720 |
| }, |
| { |
| "epoch": 1.53, |
| "learning_rate": 2.951085110398188e-05, |
| "loss": 0.8468, |
| "step": 2730 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 2.9508020381204004e-05, |
| "loss": 0.8456, |
| "step": 2740 |
| }, |
| { |
| "epoch": 1.54, |
| "learning_rate": 2.950518965842612e-05, |
| "loss": 0.7971, |
| "step": 2750 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 2.9502358935648236e-05, |
| "loss": 0.8003, |
| "step": 2760 |
| }, |
| { |
| "epoch": 1.55, |
| "learning_rate": 2.949952821287035e-05, |
| "loss": 0.7975, |
| "step": 2770 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 2.9496697490092474e-05, |
| "loss": 0.8267, |
| "step": 2780 |
| }, |
| { |
| "epoch": 1.56, |
| "learning_rate": 2.949386676731459e-05, |
| "loss": 0.853, |
| "step": 2790 |
| }, |
| { |
| "epoch": 1.57, |
| "learning_rate": 2.9491036044536706e-05, |
| "loss": 0.7963, |
| "step": 2800 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 2.948820532175882e-05, |
| "loss": 0.7784, |
| "step": 2810 |
| }, |
| { |
| "epoch": 1.58, |
| "learning_rate": 2.948537459898094e-05, |
| "loss": 0.7944, |
| "step": 2820 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 2.948254387620306e-05, |
| "loss": 0.8051, |
| "step": 2830 |
| }, |
| { |
| "epoch": 1.59, |
| "learning_rate": 2.9479713153425176e-05, |
| "loss": 0.8309, |
| "step": 2840 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 2.947688243064729e-05, |
| "loss": 0.7672, |
| "step": 2850 |
| }, |
| { |
| "epoch": 1.6, |
| "learning_rate": 2.947405170786941e-05, |
| "loss": 0.7972, |
| "step": 2860 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 2.947122098509153e-05, |
| "loss": 0.7701, |
| "step": 2870 |
| }, |
| { |
| "epoch": 1.61, |
| "learning_rate": 2.9468390262313646e-05, |
| "loss": 0.8248, |
| "step": 2880 |
| }, |
| { |
| "epoch": 1.62, |
| "learning_rate": 2.946555953953576e-05, |
| "loss": 0.818, |
| "step": 2890 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 2.946272881675788e-05, |
| "loss": 0.7704, |
| "step": 2900 |
| }, |
| { |
| "epoch": 1.63, |
| "learning_rate": 2.9459898093979996e-05, |
| "loss": 0.7693, |
| "step": 2910 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 2.9457067371202116e-05, |
| "loss": 0.7643, |
| "step": 2920 |
| }, |
| { |
| "epoch": 1.64, |
| "learning_rate": 2.945423664842423e-05, |
| "loss": 0.8132, |
| "step": 2930 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 2.9451405925646347e-05, |
| "loss": 0.8182, |
| "step": 2940 |
| }, |
| { |
| "epoch": 1.65, |
| "learning_rate": 2.9448575202868466e-05, |
| "loss": 0.7584, |
| "step": 2950 |
| }, |
| { |
| "epoch": 1.66, |
| "learning_rate": 2.9445744480090585e-05, |
| "loss": 0.762, |
| "step": 2960 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 2.94429137573127e-05, |
| "loss": 0.7926, |
| "step": 2970 |
| }, |
| { |
| "epoch": 1.67, |
| "learning_rate": 2.9440083034534817e-05, |
| "loss": 0.8043, |
| "step": 2980 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 2.9437252311756936e-05, |
| "loss": 0.8108, |
| "step": 2990 |
| }, |
| { |
| "epoch": 1.68, |
| "learning_rate": 2.9434421588979052e-05, |
| "loss": 0.7498, |
| "step": 3000 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 2.943159086620117e-05, |
| "loss": 0.74, |
| "step": 3010 |
| }, |
| { |
| "epoch": 1.69, |
| "learning_rate": 2.9428760143423287e-05, |
| "loss": 0.7501, |
| "step": 3020 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 2.9425929420645406e-05, |
| "loss": 0.7679, |
| "step": 3030 |
| }, |
| { |
| "epoch": 1.7, |
| "learning_rate": 2.9423098697867522e-05, |
| "loss": 0.7698, |
| "step": 3040 |
| }, |
| { |
| "epoch": 1.71, |
| "learning_rate": 2.942026797508964e-05, |
| "loss": 0.7402, |
| "step": 3050 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 2.9417437252311757e-05, |
| "loss": 0.7273, |
| "step": 3060 |
| }, |
| { |
| "epoch": 1.72, |
| "learning_rate": 2.9414606529533876e-05, |
| "loss": 0.7547, |
| "step": 3070 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 2.9411775806755992e-05, |
| "loss": 0.793, |
| "step": 3080 |
| }, |
| { |
| "epoch": 1.73, |
| "learning_rate": 2.9408945083978108e-05, |
| "loss": 0.7924, |
| "step": 3090 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 2.9406114361200227e-05, |
| "loss": 0.7545, |
| "step": 3100 |
| }, |
| { |
| "epoch": 1.74, |
| "learning_rate": 2.9403283638422346e-05, |
| "loss": 0.7179, |
| "step": 3110 |
| }, |
| { |
| "epoch": 1.75, |
| "learning_rate": 2.9400452915644462e-05, |
| "loss": 0.7483, |
| "step": 3120 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 2.9397622192866578e-05, |
| "loss": 0.791, |
| "step": 3130 |
| }, |
| { |
| "epoch": 1.76, |
| "learning_rate": 2.9394791470088697e-05, |
| "loss": 0.7904, |
| "step": 3140 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 2.9391960747310816e-05, |
| "loss": 0.7242, |
| "step": 3150 |
| }, |
| { |
| "epoch": 1.77, |
| "learning_rate": 2.9389130024532932e-05, |
| "loss": 0.7166, |
| "step": 3160 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 2.9386299301755048e-05, |
| "loss": 0.721, |
| "step": 3170 |
| }, |
| { |
| "epoch": 1.78, |
| "learning_rate": 2.9383468578977164e-05, |
| "loss": 0.7749, |
| "step": 3180 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 2.9380637856199286e-05, |
| "loss": 0.7757, |
| "step": 3190 |
| }, |
| { |
| "epoch": 1.79, |
| "learning_rate": 2.9377807133421402e-05, |
| "loss": 0.7332, |
| "step": 3200 |
| }, |
| { |
| "epoch": 1.8, |
| "learning_rate": 2.9374976410643518e-05, |
| "loss": 0.7201, |
| "step": 3210 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 2.9372145687865634e-05, |
| "loss": 0.7247, |
| "step": 3220 |
| }, |
| { |
| "epoch": 1.81, |
| "learning_rate": 2.9369314965087756e-05, |
| "loss": 0.7661, |
| "step": 3230 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 2.9366484242309872e-05, |
| "loss": 0.7812, |
| "step": 3240 |
| }, |
| { |
| "epoch": 1.82, |
| "learning_rate": 2.9363653519531988e-05, |
| "loss": 0.6972, |
| "step": 3250 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 2.9360822796754104e-05, |
| "loss": 0.7056, |
| "step": 3260 |
| }, |
| { |
| "epoch": 1.83, |
| "learning_rate": 2.9357992073976223e-05, |
| "loss": 0.7262, |
| "step": 3270 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 2.9355161351198342e-05, |
| "loss": 0.7611, |
| "step": 3280 |
| }, |
| { |
| "epoch": 1.84, |
| "learning_rate": 2.9352330628420458e-05, |
| "loss": 0.7496, |
| "step": 3290 |
| }, |
| { |
| "epoch": 1.85, |
| "learning_rate": 2.9349499905642574e-05, |
| "loss": 0.7093, |
| "step": 3300 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 2.9346669182864693e-05, |
| "loss": 0.7108, |
| "step": 3310 |
| }, |
| { |
| "epoch": 1.86, |
| "learning_rate": 2.9343838460086812e-05, |
| "loss": 0.6877, |
| "step": 3320 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 2.9341007737308928e-05, |
| "loss": 0.7517, |
| "step": 3330 |
| }, |
| { |
| "epoch": 1.87, |
| "learning_rate": 2.9338177014531044e-05, |
| "loss": 0.7295, |
| "step": 3340 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 2.933534629175316e-05, |
| "loss": 0.6843, |
| "step": 3350 |
| }, |
| { |
| "epoch": 1.88, |
| "learning_rate": 2.933251556897528e-05, |
| "loss": 0.6976, |
| "step": 3360 |
| }, |
| { |
| "epoch": 1.89, |
| "learning_rate": 2.9329684846197398e-05, |
| "loss": 0.7029, |
| "step": 3370 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 2.9326854123419513e-05, |
| "loss": 0.7612, |
| "step": 3380 |
| }, |
| { |
| "epoch": 1.9, |
| "learning_rate": 2.932402340064163e-05, |
| "loss": 0.7504, |
| "step": 3390 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 2.932119267786375e-05, |
| "loss": 0.6829, |
| "step": 3400 |
| }, |
| { |
| "epoch": 1.91, |
| "learning_rate": 2.9318361955085868e-05, |
| "loss": 0.6806, |
| "step": 3410 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 2.9315531232307983e-05, |
| "loss": 0.6949, |
| "step": 3420 |
| }, |
| { |
| "epoch": 1.92, |
| "learning_rate": 2.93127005095301e-05, |
| "loss": 0.7453, |
| "step": 3430 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 2.930986978675222e-05, |
| "loss": 0.7509, |
| "step": 3440 |
| }, |
| { |
| "epoch": 1.93, |
| "learning_rate": 2.9307039063974334e-05, |
| "loss": 0.6776, |
| "step": 3450 |
| }, |
| { |
| "epoch": 1.94, |
| "learning_rate": 2.9304208341196453e-05, |
| "loss": 0.6886, |
| "step": 3460 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 2.930137761841857e-05, |
| "loss": 0.672, |
| "step": 3470 |
| }, |
| { |
| "epoch": 1.95, |
| "learning_rate": 2.929854689564069e-05, |
| "loss": 0.7158, |
| "step": 3480 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 2.9295716172862804e-05, |
| "loss": 0.7133, |
| "step": 3490 |
| }, |
| { |
| "epoch": 1.96, |
| "learning_rate": 2.9292885450084923e-05, |
| "loss": 0.665, |
| "step": 3500 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 2.929005472730704e-05, |
| "loss": 0.6845, |
| "step": 3510 |
| }, |
| { |
| "epoch": 1.97, |
| "learning_rate": 2.928722400452916e-05, |
| "loss": 0.6608, |
| "step": 3520 |
| }, |
| { |
| "epoch": 1.98, |
| "learning_rate": 2.9284393281751274e-05, |
| "loss": 0.725, |
| "step": 3530 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 2.928156255897339e-05, |
| "loss": 0.7123, |
| "step": 3540 |
| }, |
| { |
| "epoch": 1.99, |
| "learning_rate": 2.927873183619551e-05, |
| "loss": 0.6641, |
| "step": 3550 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 2.927590111341763e-05, |
| "loss": 0.677, |
| "step": 3560 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_cer": 0.2863964848644605, |
| "eval_loss": 0.773628294467926, |
| "eval_runtime": 309.8033, |
| "eval_samples_per_second": 17.182, |
| "eval_steps_per_second": 4.296, |
| "eval_wer": 0.3935049467465578, |
| "step": 3566 |
| }, |
| { |
| "epoch": 2.0, |
| "learning_rate": 2.9273070390639744e-05, |
| "loss": 0.7897, |
| "step": 3570 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 2.927023966786186e-05, |
| "loss": 0.6561, |
| "step": 3580 |
| }, |
| { |
| "epoch": 2.01, |
| "learning_rate": 2.926740894508398e-05, |
| "loss": 0.654, |
| "step": 3590 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 2.92645782223061e-05, |
| "loss": 0.6749, |
| "step": 3600 |
| }, |
| { |
| "epoch": 2.02, |
| "learning_rate": 2.9261747499528214e-05, |
| "loss": 0.6953, |
| "step": 3610 |
| }, |
| { |
| "epoch": 2.03, |
| "learning_rate": 2.925891677675033e-05, |
| "loss": 0.7238, |
| "step": 3620 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 2.9256086053972446e-05, |
| "loss": 0.657, |
| "step": 3630 |
| }, |
| { |
| "epoch": 2.04, |
| "learning_rate": 2.925325533119457e-05, |
| "loss": 0.6475, |
| "step": 3640 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 2.9250424608416684e-05, |
| "loss": 0.6632, |
| "step": 3650 |
| }, |
| { |
| "epoch": 2.05, |
| "learning_rate": 2.92475938856388e-05, |
| "loss": 0.6758, |
| "step": 3660 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 2.9244763162860916e-05, |
| "loss": 0.7328, |
| "step": 3670 |
| }, |
| { |
| "epoch": 2.06, |
| "learning_rate": 2.924193244008304e-05, |
| "loss": 0.6625, |
| "step": 3680 |
| }, |
| { |
| "epoch": 2.07, |
| "learning_rate": 2.9239101717305154e-05, |
| "loss": 0.6557, |
| "step": 3690 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 2.923627099452727e-05, |
| "loss": 0.6655, |
| "step": 3700 |
| }, |
| { |
| "epoch": 2.08, |
| "learning_rate": 2.9233440271749386e-05, |
| "loss": 0.6805, |
| "step": 3710 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 2.9230609548971505e-05, |
| "loss": 0.7337, |
| "step": 3720 |
| }, |
| { |
| "epoch": 2.09, |
| "learning_rate": 2.9227778826193624e-05, |
| "loss": 0.6584, |
| "step": 3730 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 2.922494810341574e-05, |
| "loss": 0.6545, |
| "step": 3740 |
| }, |
| { |
| "epoch": 2.1, |
| "learning_rate": 2.9222117380637856e-05, |
| "loss": 0.6617, |
| "step": 3750 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 2.921928665785997e-05, |
| "loss": 0.7022, |
| "step": 3760 |
| }, |
| { |
| "epoch": 2.11, |
| "learning_rate": 2.9216455935082094e-05, |
| "loss": 0.7017, |
| "step": 3770 |
| }, |
| { |
| "epoch": 2.12, |
| "learning_rate": 2.921362521230421e-05, |
| "loss": 0.6405, |
| "step": 3780 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 2.9210794489526326e-05, |
| "loss": 0.654, |
| "step": 3790 |
| }, |
| { |
| "epoch": 2.13, |
| "learning_rate": 2.920796376674844e-05, |
| "loss": 0.6671, |
| "step": 3800 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 2.920513304397056e-05, |
| "loss": 0.6745, |
| "step": 3810 |
| }, |
| { |
| "epoch": 2.14, |
| "learning_rate": 2.920230232119268e-05, |
| "loss": 0.716, |
| "step": 3820 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 2.9199471598414796e-05, |
| "loss": 0.6523, |
| "step": 3830 |
| }, |
| { |
| "epoch": 2.15, |
| "learning_rate": 2.919664087563691e-05, |
| "loss": 0.6376, |
| "step": 3840 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 2.919381015285903e-05, |
| "loss": 0.6363, |
| "step": 3850 |
| }, |
| { |
| "epoch": 2.16, |
| "learning_rate": 2.919097943008115e-05, |
| "loss": 0.6686, |
| "step": 3860 |
| }, |
| { |
| "epoch": 2.17, |
| "learning_rate": 2.9188148707303266e-05, |
| "loss": 0.6917, |
| "step": 3870 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 2.918531798452538e-05, |
| "loss": 0.6369, |
| "step": 3880 |
| }, |
| { |
| "epoch": 2.18, |
| "learning_rate": 2.91824872617475e-05, |
| "loss": 0.6318, |
| "step": 3890 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 2.9179656538969616e-05, |
| "loss": 0.6219, |
| "step": 3900 |
| }, |
| { |
| "epoch": 2.19, |
| "learning_rate": 2.9176825816191736e-05, |
| "loss": 0.6903, |
| "step": 3910 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 2.917399509341385e-05, |
| "loss": 0.6913, |
| "step": 3920 |
| }, |
| { |
| "epoch": 2.2, |
| "learning_rate": 2.917116437063597e-05, |
| "loss": 0.64, |
| "step": 3930 |
| }, |
| { |
| "epoch": 2.21, |
| "learning_rate": 2.9168333647858086e-05, |
| "loss": 0.6308, |
| "step": 3940 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 2.9165502925080206e-05, |
| "loss": 0.6212, |
| "step": 3950 |
| }, |
| { |
| "epoch": 2.22, |
| "learning_rate": 2.916267220230232e-05, |
| "loss": 0.6349, |
| "step": 3960 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 2.915984147952444e-05, |
| "loss": 0.6923, |
| "step": 3970 |
| }, |
| { |
| "epoch": 2.23, |
| "learning_rate": 2.9157010756746556e-05, |
| "loss": 0.6238, |
| "step": 3980 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 2.9154180033968672e-05, |
| "loss": 0.6274, |
| "step": 3990 |
| }, |
| { |
| "epoch": 2.24, |
| "learning_rate": 2.915134931119079e-05, |
| "loss": 0.6298, |
| "step": 4000 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 2.914851858841291e-05, |
| "loss": 0.661, |
| "step": 4010 |
| }, |
| { |
| "epoch": 2.25, |
| "learning_rate": 2.9145687865635026e-05, |
| "loss": 0.7068, |
| "step": 4020 |
| }, |
| { |
| "epoch": 2.26, |
| "learning_rate": 2.9142857142857142e-05, |
| "loss": 0.6347, |
| "step": 4030 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.914002642007926e-05, |
| "loss": 0.6186, |
| "step": 4040 |
| }, |
| { |
| "epoch": 2.27, |
| "learning_rate": 2.913719569730138e-05, |
| "loss": 0.6102, |
| "step": 4050 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.9134364974523496e-05, |
| "loss": 0.6292, |
| "step": 4060 |
| }, |
| { |
| "epoch": 2.28, |
| "learning_rate": 2.9131534251745612e-05, |
| "loss": 0.6939, |
| "step": 4070 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.9128703528967728e-05, |
| "loss": 0.5952, |
| "step": 4080 |
| }, |
| { |
| "epoch": 2.29, |
| "learning_rate": 2.912587280618985e-05, |
| "loss": 0.6198, |
| "step": 4090 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.9123042083411966e-05, |
| "loss": 0.6129, |
| "step": 4100 |
| }, |
| { |
| "epoch": 2.3, |
| "learning_rate": 2.9120211360634082e-05, |
| "loss": 0.6466, |
| "step": 4110 |
| }, |
| { |
| "epoch": 2.31, |
| "learning_rate": 2.9117380637856198e-05, |
| "loss": 0.6839, |
| "step": 4120 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.911454991507832e-05, |
| "loss": 0.6096, |
| "step": 4130 |
| }, |
| { |
| "epoch": 2.32, |
| "learning_rate": 2.9111719192300436e-05, |
| "loss": 0.6175, |
| "step": 4140 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.9108888469522552e-05, |
| "loss": 0.6211, |
| "step": 4150 |
| }, |
| { |
| "epoch": 2.33, |
| "learning_rate": 2.9106057746744668e-05, |
| "loss": 0.635, |
| "step": 4160 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.9103227023966787e-05, |
| "loss": 0.6879, |
| "step": 4170 |
| }, |
| { |
| "epoch": 2.34, |
| "learning_rate": 2.9100396301188906e-05, |
| "loss": 0.6202, |
| "step": 4180 |
| }, |
| { |
| "epoch": 2.35, |
| "learning_rate": 2.9097565578411022e-05, |
| "loss": 0.5994, |
| "step": 4190 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.9094734855633138e-05, |
| "loss": 0.6186, |
| "step": 4200 |
| }, |
| { |
| "epoch": 2.36, |
| "learning_rate": 2.9091904132855254e-05, |
| "loss": 0.6133, |
| "step": 4210 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.9089073410077376e-05, |
| "loss": 0.6606, |
| "step": 4220 |
| }, |
| { |
| "epoch": 2.37, |
| "learning_rate": 2.9086242687299492e-05, |
| "loss": 0.6176, |
| "step": 4230 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.9083411964521608e-05, |
| "loss": 0.609, |
| "step": 4240 |
| }, |
| { |
| "epoch": 2.38, |
| "learning_rate": 2.9080581241743724e-05, |
| "loss": 0.6064, |
| "step": 4250 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.9077750518965843e-05, |
| "loss": 0.6259, |
| "step": 4260 |
| }, |
| { |
| "epoch": 2.39, |
| "learning_rate": 2.9074919796187962e-05, |
| "loss": 0.7006, |
| "step": 4270 |
| }, |
| { |
| "epoch": 2.4, |
| "learning_rate": 2.9072089073410078e-05, |
| "loss": 0.609, |
| "step": 4280 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 2.9069258350632194e-05, |
| "loss": 0.6077, |
| "step": 4290 |
| }, |
| { |
| "epoch": 2.41, |
| "learning_rate": 2.9066427627854313e-05, |
| "loss": 0.5983, |
| "step": 4300 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 2.9063596905076432e-05, |
| "loss": 0.6113, |
| "step": 4310 |
| }, |
| { |
| "epoch": 2.42, |
| "learning_rate": 2.9060766182298548e-05, |
| "loss": 0.6849, |
| "step": 4320 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 2.9057935459520664e-05, |
| "loss": 0.6073, |
| "step": 4330 |
| }, |
| { |
| "epoch": 2.43, |
| "learning_rate": 2.9055104736742783e-05, |
| "loss": 0.6015, |
| "step": 4340 |
| }, |
| { |
| "epoch": 2.44, |
| "learning_rate": 2.90522740139649e-05, |
| "loss": 0.5945, |
| "step": 4350 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 2.9049443291187018e-05, |
| "loss": 0.632, |
| "step": 4360 |
| }, |
| { |
| "epoch": 2.45, |
| "learning_rate": 2.9046612568409134e-05, |
| "loss": 0.6794, |
| "step": 4370 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 2.9043781845631253e-05, |
| "loss": 0.5892, |
| "step": 4380 |
| }, |
| { |
| "epoch": 2.46, |
| "learning_rate": 2.904095112285337e-05, |
| "loss": 0.6103, |
| "step": 4390 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 2.9038120400075488e-05, |
| "loss": 0.6041, |
| "step": 4400 |
| }, |
| { |
| "epoch": 2.47, |
| "learning_rate": 2.9035289677297604e-05, |
| "loss": 0.6394, |
| "step": 4410 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 2.9032458954519723e-05, |
| "loss": 0.6814, |
| "step": 4420 |
| }, |
| { |
| "epoch": 2.48, |
| "learning_rate": 2.902962823174184e-05, |
| "loss": 0.5948, |
| "step": 4430 |
| }, |
| { |
| "epoch": 2.49, |
| "learning_rate": 2.9026797508963954e-05, |
| "loss": 0.5827, |
| "step": 4440 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 2.9023966786186074e-05, |
| "loss": 0.6188, |
| "step": 4450 |
| }, |
| { |
| "epoch": 2.5, |
| "learning_rate": 2.9021136063408193e-05, |
| "loss": 0.6108, |
| "step": 4460 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 2.901830534063031e-05, |
| "loss": 0.6656, |
| "step": 4470 |
| }, |
| { |
| "epoch": 2.51, |
| "learning_rate": 2.9015474617852424e-05, |
| "loss": 0.5815, |
| "step": 4480 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 2.9012643895074543e-05, |
| "loss": 0.5864, |
| "step": 4490 |
| }, |
| { |
| "epoch": 2.52, |
| "learning_rate": 2.9009813172296663e-05, |
| "loss": 0.5825, |
| "step": 4500 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 2.900698244951878e-05, |
| "loss": 0.6157, |
| "step": 4510 |
| }, |
| { |
| "epoch": 2.53, |
| "learning_rate": 2.9004151726740894e-05, |
| "loss": 0.6805, |
| "step": 4520 |
| }, |
| { |
| "epoch": 2.54, |
| "learning_rate": 2.900132100396301e-05, |
| "loss": 0.5881, |
| "step": 4530 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 2.8998490281185133e-05, |
| "loss": 0.5786, |
| "step": 4540 |
| }, |
| { |
| "epoch": 2.55, |
| "learning_rate": 2.899565955840725e-05, |
| "loss": 0.5926, |
| "step": 4550 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 2.8992828835629364e-05, |
| "loss": 0.6128, |
| "step": 4560 |
| }, |
| { |
| "epoch": 2.56, |
| "learning_rate": 2.898999811285148e-05, |
| "loss": 0.6529, |
| "step": 4570 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 2.8987167390073603e-05, |
| "loss": 0.5953, |
| "step": 4580 |
| }, |
| { |
| "epoch": 2.57, |
| "learning_rate": 2.898433666729572e-05, |
| "loss": 0.5728, |
| "step": 4590 |
| }, |
| { |
| "epoch": 2.58, |
| "learning_rate": 2.8981505944517834e-05, |
| "loss": 0.5783, |
| "step": 4600 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.897867522173995e-05, |
| "loss": 0.5983, |
| "step": 4610 |
| }, |
| { |
| "epoch": 2.59, |
| "learning_rate": 2.8975844498962066e-05, |
| "loss": 0.628, |
| "step": 4620 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.897301377618419e-05, |
| "loss": 0.5768, |
| "step": 4630 |
| }, |
| { |
| "epoch": 2.6, |
| "learning_rate": 2.8970183053406304e-05, |
| "loss": 0.5834, |
| "step": 4640 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.896735233062842e-05, |
| "loss": 0.5805, |
| "step": 4650 |
| }, |
| { |
| "epoch": 2.61, |
| "learning_rate": 2.8964521607850536e-05, |
| "loss": 0.6116, |
| "step": 4660 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.896169088507266e-05, |
| "loss": 0.6436, |
| "step": 4670 |
| }, |
| { |
| "epoch": 2.62, |
| "learning_rate": 2.8958860162294774e-05, |
| "loss": 0.569, |
| "step": 4680 |
| }, |
| { |
| "epoch": 2.63, |
| "learning_rate": 2.895602943951689e-05, |
| "loss": 0.5859, |
| "step": 4690 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.8953198716739006e-05, |
| "loss": 0.5921, |
| "step": 4700 |
| }, |
| { |
| "epoch": 2.64, |
| "learning_rate": 2.8950367993961125e-05, |
| "loss": 0.6254, |
| "step": 4710 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.8947537271183244e-05, |
| "loss": 0.6233, |
| "step": 4720 |
| }, |
| { |
| "epoch": 2.65, |
| "learning_rate": 2.894470654840536e-05, |
| "loss": 0.5449, |
| "step": 4730 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.8941875825627476e-05, |
| "loss": 0.588, |
| "step": 4740 |
| }, |
| { |
| "epoch": 2.66, |
| "learning_rate": 2.8939045102849595e-05, |
| "loss": 0.5737, |
| "step": 4750 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.8936214380071714e-05, |
| "loss": 0.6037, |
| "step": 4760 |
| }, |
| { |
| "epoch": 2.67, |
| "learning_rate": 2.893338365729383e-05, |
| "loss": 0.6298, |
| "step": 4770 |
| }, |
| { |
| "epoch": 2.68, |
| "learning_rate": 2.8930552934515946e-05, |
| "loss": 0.5602, |
| "step": 4780 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.8927722211738065e-05, |
| "loss": 0.5609, |
| "step": 4790 |
| }, |
| { |
| "epoch": 2.69, |
| "learning_rate": 2.892489148896018e-05, |
| "loss": 0.5733, |
| "step": 4800 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.89220607661823e-05, |
| "loss": 0.6003, |
| "step": 4810 |
| }, |
| { |
| "epoch": 2.7, |
| "learning_rate": 2.8919230043404416e-05, |
| "loss": 0.6406, |
| "step": 4820 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.8916399320626535e-05, |
| "loss": 0.557, |
| "step": 4830 |
| }, |
| { |
| "epoch": 2.71, |
| "learning_rate": 2.891356859784865e-05, |
| "loss": 0.5533, |
| "step": 4840 |
| }, |
| { |
| "epoch": 2.72, |
| "learning_rate": 2.891073787507077e-05, |
| "loss": 0.582, |
| "step": 4850 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 2.8907907152292886e-05, |
| "loss": 0.5913, |
| "step": 4860 |
| }, |
| { |
| "epoch": 2.73, |
| "learning_rate": 2.8905076429515005e-05, |
| "loss": 0.6417, |
| "step": 4870 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 2.890224570673712e-05, |
| "loss": 0.5469, |
| "step": 4880 |
| }, |
| { |
| "epoch": 2.74, |
| "learning_rate": 2.8899414983959236e-05, |
| "loss": 0.559, |
| "step": 4890 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 2.8896584261181356e-05, |
| "loss": 0.5567, |
| "step": 4900 |
| }, |
| { |
| "epoch": 2.75, |
| "learning_rate": 2.8893753538403475e-05, |
| "loss": 0.606, |
| "step": 4910 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 2.889092281562559e-05, |
| "loss": 0.644, |
| "step": 4920 |
| }, |
| { |
| "epoch": 2.76, |
| "learning_rate": 2.8888092092847706e-05, |
| "loss": 0.5682, |
| "step": 4930 |
| }, |
| { |
| "epoch": 2.77, |
| "learning_rate": 2.8885261370069826e-05, |
| "loss": 0.5616, |
| "step": 4940 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.8882430647291945e-05, |
| "loss": 0.5511, |
| "step": 4950 |
| }, |
| { |
| "epoch": 2.78, |
| "learning_rate": 2.887959992451406e-05, |
| "loss": 0.5743, |
| "step": 4960 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.8876769201736176e-05, |
| "loss": 0.6413, |
| "step": 4970 |
| }, |
| { |
| "epoch": 2.79, |
| "learning_rate": 2.8873938478958292e-05, |
| "loss": 0.5508, |
| "step": 4980 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.8871107756180415e-05, |
| "loss": 0.5626, |
| "step": 4990 |
| }, |
| { |
| "epoch": 2.8, |
| "learning_rate": 2.886827703340253e-05, |
| "loss": 0.5639, |
| "step": 5000 |
| }, |
| { |
| "epoch": 2.81, |
| "learning_rate": 2.8865446310624646e-05, |
| "loss": 0.584, |
| "step": 5010 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 2.8862615587846762e-05, |
| "loss": 0.6044, |
| "step": 5020 |
| }, |
| { |
| "epoch": 2.82, |
| "learning_rate": 2.885978486506888e-05, |
| "loss": 0.5568, |
| "step": 5030 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 2.8856954142291e-05, |
| "loss": 0.5688, |
| "step": 5040 |
| }, |
| { |
| "epoch": 2.83, |
| "learning_rate": 2.8854123419513116e-05, |
| "loss": 0.5544, |
| "step": 5050 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 2.8851292696735232e-05, |
| "loss": 0.5625, |
| "step": 5060 |
| }, |
| { |
| "epoch": 2.84, |
| "learning_rate": 2.8848461973957348e-05, |
| "loss": 0.6395, |
| "step": 5070 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 2.884563125117947e-05, |
| "loss": 0.5481, |
| "step": 5080 |
| }, |
| { |
| "epoch": 2.85, |
| "learning_rate": 2.8842800528401586e-05, |
| "loss": 0.5589, |
| "step": 5090 |
| }, |
| { |
| "epoch": 2.86, |
| "learning_rate": 2.8839969805623702e-05, |
| "loss": 0.5454, |
| "step": 5100 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 2.8837139082845818e-05, |
| "loss": 0.5799, |
| "step": 5110 |
| }, |
| { |
| "epoch": 2.87, |
| "learning_rate": 2.883430836006794e-05, |
| "loss": 0.6251, |
| "step": 5120 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 2.8831477637290056e-05, |
| "loss": 0.5346, |
| "step": 5130 |
| }, |
| { |
| "epoch": 2.88, |
| "learning_rate": 2.8828646914512172e-05, |
| "loss": 0.5519, |
| "step": 5140 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 2.8825816191734288e-05, |
| "loss": 0.5559, |
| "step": 5150 |
| }, |
| { |
| "epoch": 2.89, |
| "learning_rate": 2.8822985468956407e-05, |
| "loss": 0.5896, |
| "step": 5160 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 2.8820154746178526e-05, |
| "loss": 0.6256, |
| "step": 5170 |
| }, |
| { |
| "epoch": 2.9, |
| "learning_rate": 2.8817324023400642e-05, |
| "loss": 0.5468, |
| "step": 5180 |
| }, |
| { |
| "epoch": 2.91, |
| "learning_rate": 2.8814493300622758e-05, |
| "loss": 0.5556, |
| "step": 5190 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 2.8811662577844877e-05, |
| "loss": 0.5379, |
| "step": 5200 |
| }, |
| { |
| "epoch": 2.92, |
| "learning_rate": 2.8808831855066996e-05, |
| "loss": 0.5905, |
| "step": 5210 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.8806001132289112e-05, |
| "loss": 0.6027, |
| "step": 5220 |
| }, |
| { |
| "epoch": 2.93, |
| "learning_rate": 2.8803170409511228e-05, |
| "loss": 0.5375, |
| "step": 5230 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.8800339686733347e-05, |
| "loss": 0.5366, |
| "step": 5240 |
| }, |
| { |
| "epoch": 2.94, |
| "learning_rate": 2.8797508963955463e-05, |
| "loss": 0.5577, |
| "step": 5250 |
| }, |
| { |
| "epoch": 2.95, |
| "learning_rate": 2.8794678241177582e-05, |
| "loss": 0.5714, |
| "step": 5260 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.8791847518399698e-05, |
| "loss": 0.6394, |
| "step": 5270 |
| }, |
| { |
| "epoch": 2.96, |
| "learning_rate": 2.8789016795621817e-05, |
| "loss": 0.5567, |
| "step": 5280 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.8786186072843933e-05, |
| "loss": 0.554, |
| "step": 5290 |
| }, |
| { |
| "epoch": 2.97, |
| "learning_rate": 2.8783355350066052e-05, |
| "loss": 0.5419, |
| "step": 5300 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.8780524627288168e-05, |
| "loss": 0.5618, |
| "step": 5310 |
| }, |
| { |
| "epoch": 2.98, |
| "learning_rate": 2.8777693904510287e-05, |
| "loss": 0.6307, |
| "step": 5320 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 2.8774863181732403e-05, |
| "loss": 0.5373, |
| "step": 5330 |
| }, |
| { |
| "epoch": 2.99, |
| "learning_rate": 2.877203245895452e-05, |
| "loss": 0.5629, |
| "step": 5340 |
| }, |
| { |
| "epoch": 3.0, |
| "eval_cer": 0.23116898304990358, |
| "eval_loss": 0.6435202956199646, |
| "eval_runtime": 310.4374, |
| "eval_samples_per_second": 17.147, |
| "eval_steps_per_second": 4.287, |
| "eval_wer": 0.32985321453078864, |
| "step": 5349 |
| }, |
| { |
| "epoch": 3.0, |
| "learning_rate": 2.8769201736176638e-05, |
| "loss": 0.6119, |
| "step": 5350 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 2.8766371013398757e-05, |
| "loss": 0.5217, |
| "step": 5360 |
| }, |
| { |
| "epoch": 3.01, |
| "learning_rate": 2.8763540290620873e-05, |
| "loss": 0.5462, |
| "step": 5370 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 2.876070956784299e-05, |
| "loss": 0.5542, |
| "step": 5380 |
| }, |
| { |
| "epoch": 3.02, |
| "learning_rate": 2.8757878845065108e-05, |
| "loss": 0.5484, |
| "step": 5390 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 2.8755048122287227e-05, |
| "loss": 0.58, |
| "step": 5400 |
| }, |
| { |
| "epoch": 3.03, |
| "learning_rate": 2.8752217399509343e-05, |
| "loss": 0.516, |
| "step": 5410 |
| }, |
| { |
| "epoch": 3.04, |
| "learning_rate": 2.874938667673146e-05, |
| "loss": 0.532, |
| "step": 5420 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 2.8746555953953574e-05, |
| "loss": 0.5353, |
| "step": 5430 |
| }, |
| { |
| "epoch": 3.05, |
| "learning_rate": 2.8743725231175694e-05, |
| "loss": 0.5414, |
| "step": 5440 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 2.8740894508397813e-05, |
| "loss": 0.5999, |
| "step": 5450 |
| }, |
| { |
| "epoch": 3.06, |
| "learning_rate": 2.873806378561993e-05, |
| "loss": 0.526, |
| "step": 5460 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 2.8735233062842044e-05, |
| "loss": 0.5141, |
| "step": 5470 |
| }, |
| { |
| "epoch": 3.07, |
| "learning_rate": 2.8732402340064164e-05, |
| "loss": 0.5436, |
| "step": 5480 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 2.8729571617286283e-05, |
| "loss": 0.5312, |
| "step": 5490 |
| }, |
| { |
| "epoch": 3.08, |
| "learning_rate": 2.87267408945084e-05, |
| "loss": 0.61, |
| "step": 5500 |
| }, |
| { |
| "epoch": 3.09, |
| "learning_rate": 2.8723910171730514e-05, |
| "loss": 0.5358, |
| "step": 5510 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 2.872107944895263e-05, |
| "loss": 0.5294, |
| "step": 5520 |
| }, |
| { |
| "epoch": 3.1, |
| "learning_rate": 2.8718248726174753e-05, |
| "loss": 0.5218, |
| "step": 5530 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 2.871541800339687e-05, |
| "loss": 0.5507, |
| "step": 5540 |
| }, |
| { |
| "epoch": 3.11, |
| "learning_rate": 2.8712587280618984e-05, |
| "loss": 0.6163, |
| "step": 5550 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 2.87097565578411e-05, |
| "loss": 0.5209, |
| "step": 5560 |
| }, |
| { |
| "epoch": 3.12, |
| "learning_rate": 2.8706925835063223e-05, |
| "loss": 0.5191, |
| "step": 5570 |
| }, |
| { |
| "epoch": 3.13, |
| "learning_rate": 2.870409511228534e-05, |
| "loss": 0.5125, |
| "step": 5580 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 2.8701264389507454e-05, |
| "loss": 0.5424, |
| "step": 5590 |
| }, |
| { |
| "epoch": 3.14, |
| "learning_rate": 2.869843366672957e-05, |
| "loss": 0.6016, |
| "step": 5600 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 2.869560294395169e-05, |
| "loss": 0.5252, |
| "step": 5610 |
| }, |
| { |
| "epoch": 3.15, |
| "learning_rate": 2.869277222117381e-05, |
| "loss": 0.5056, |
| "step": 5620 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 2.8689941498395924e-05, |
| "loss": 0.5316, |
| "step": 5630 |
| }, |
| { |
| "epoch": 3.16, |
| "learning_rate": 2.868711077561804e-05, |
| "loss": 0.5261, |
| "step": 5640 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 2.868428005284016e-05, |
| "loss": 0.5929, |
| "step": 5650 |
| }, |
| { |
| "epoch": 3.17, |
| "learning_rate": 2.868144933006228e-05, |
| "loss": 0.5161, |
| "step": 5660 |
| }, |
| { |
| "epoch": 3.18, |
| "learning_rate": 2.8678618607284394e-05, |
| "loss": 0.4942, |
| "step": 5670 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 2.867578788450651e-05, |
| "loss": 0.5363, |
| "step": 5680 |
| }, |
| { |
| "epoch": 3.19, |
| "learning_rate": 2.867295716172863e-05, |
| "loss": 0.5392, |
| "step": 5690 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 2.8670126438950745e-05, |
| "loss": 0.6017, |
| "step": 5700 |
| }, |
| { |
| "epoch": 3.2, |
| "learning_rate": 2.8667295716172864e-05, |
| "loss": 0.5027, |
| "step": 5710 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 2.866446499339498e-05, |
| "loss": 0.4949, |
| "step": 5720 |
| }, |
| { |
| "epoch": 3.21, |
| "learning_rate": 2.86616342706171e-05, |
| "loss": 0.5136, |
| "step": 5730 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 2.8658803547839215e-05, |
| "loss": 0.5295, |
| "step": 5740 |
| }, |
| { |
| "epoch": 3.22, |
| "learning_rate": 2.8655972825061334e-05, |
| "loss": 0.5962, |
| "step": 5750 |
| }, |
| { |
| "epoch": 3.23, |
| "learning_rate": 2.865314210228345e-05, |
| "loss": 0.5169, |
| "step": 5760 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 2.865031137950557e-05, |
| "loss": 0.526, |
| "step": 5770 |
| }, |
| { |
| "epoch": 3.24, |
| "learning_rate": 2.8647480656727685e-05, |
| "loss": 0.5189, |
| "step": 5780 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 2.86446499339498e-05, |
| "loss": 0.5253, |
| "step": 5790 |
| }, |
| { |
| "epoch": 3.25, |
| "learning_rate": 2.864181921117192e-05, |
| "loss": 0.596, |
| "step": 5800 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 2.863898848839404e-05, |
| "loss": 0.5162, |
| "step": 5810 |
| }, |
| { |
| "epoch": 3.26, |
| "learning_rate": 2.8636157765616155e-05, |
| "loss": 0.5218, |
| "step": 5820 |
| }, |
| { |
| "epoch": 3.27, |
| "learning_rate": 2.863332704283827e-05, |
| "loss": 0.5017, |
| "step": 5830 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 2.863049632006039e-05, |
| "loss": 0.5136, |
| "step": 5840 |
| }, |
| { |
| "epoch": 3.28, |
| "learning_rate": 2.8627665597282506e-05, |
| "loss": 0.5759, |
| "step": 5850 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 2.8624834874504625e-05, |
| "loss": 0.5061, |
| "step": 5860 |
| }, |
| { |
| "epoch": 3.29, |
| "learning_rate": 2.862200415172674e-05, |
| "loss": 0.5049, |
| "step": 5870 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 2.8619173428948857e-05, |
| "loss": 0.5046, |
| "step": 5880 |
| }, |
| { |
| "epoch": 3.3, |
| "learning_rate": 2.8616342706170976e-05, |
| "loss": 0.5332, |
| "step": 5890 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 2.8613511983393095e-05, |
| "loss": 0.5876, |
| "step": 5900 |
| }, |
| { |
| "epoch": 3.31, |
| "learning_rate": 2.861068126061521e-05, |
| "loss": 0.5024, |
| "step": 5910 |
| }, |
| { |
| "epoch": 3.32, |
| "learning_rate": 2.8607850537837327e-05, |
| "loss": 0.5002, |
| "step": 5920 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 2.8605019815059446e-05, |
| "loss": 0.499, |
| "step": 5930 |
| }, |
| { |
| "epoch": 3.33, |
| "learning_rate": 2.8602189092281565e-05, |
| "loss": 0.5169, |
| "step": 5940 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 2.859935836950368e-05, |
| "loss": 0.591, |
| "step": 5950 |
| }, |
| { |
| "epoch": 3.34, |
| "learning_rate": 2.8596527646725797e-05, |
| "loss": 0.5115, |
| "step": 5960 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 2.8593696923947912e-05, |
| "loss": 0.5091, |
| "step": 5970 |
| }, |
| { |
| "epoch": 3.35, |
| "learning_rate": 2.8590866201170035e-05, |
| "loss": 0.5112, |
| "step": 5980 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 2.858803547839215e-05, |
| "loss": 0.5271, |
| "step": 5990 |
| }, |
| { |
| "epoch": 3.36, |
| "learning_rate": 2.8585204755614267e-05, |
| "loss": 0.5831, |
| "step": 6000 |
| }, |
| { |
| "epoch": 3.37, |
| "learning_rate": 2.8582374032836382e-05, |
| "loss": 0.4956, |
| "step": 6010 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 2.8579543310058505e-05, |
| "loss": 0.5188, |
| "step": 6020 |
| }, |
| { |
| "epoch": 3.38, |
| "learning_rate": 2.857671258728062e-05, |
| "loss": 0.5056, |
| "step": 6030 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 2.8573881864502736e-05, |
| "loss": 0.5216, |
| "step": 6040 |
| }, |
| { |
| "epoch": 3.39, |
| "learning_rate": 2.8571051141724852e-05, |
| "loss": 0.5937, |
| "step": 6050 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 2.856822041894697e-05, |
| "loss": 0.4992, |
| "step": 6060 |
| }, |
| { |
| "epoch": 3.4, |
| "learning_rate": 2.856538969616909e-05, |
| "loss": 0.5072, |
| "step": 6070 |
| }, |
| { |
| "epoch": 3.41, |
| "learning_rate": 2.8562558973391206e-05, |
| "loss": 0.5082, |
| "step": 6080 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 2.8559728250613322e-05, |
| "loss": 0.5034, |
| "step": 6090 |
| }, |
| { |
| "epoch": 3.42, |
| "learning_rate": 2.855689752783544e-05, |
| "loss": 0.5486, |
| "step": 6100 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 2.855406680505756e-05, |
| "loss": 0.4839, |
| "step": 6110 |
| }, |
| { |
| "epoch": 3.43, |
| "learning_rate": 2.8551236082279676e-05, |
| "loss": 0.5167, |
| "step": 6120 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 2.8548405359501792e-05, |
| "loss": 0.4965, |
| "step": 6130 |
| }, |
| { |
| "epoch": 3.44, |
| "learning_rate": 2.854557463672391e-05, |
| "loss": 0.4942, |
| "step": 6140 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 2.8542743913946027e-05, |
| "loss": 0.5887, |
| "step": 6150 |
| }, |
| { |
| "epoch": 3.45, |
| "learning_rate": 2.8539913191168146e-05, |
| "loss": 0.5081, |
| "step": 6160 |
| }, |
| { |
| "epoch": 3.46, |
| "learning_rate": 2.8537082468390262e-05, |
| "loss": 0.5092, |
| "step": 6170 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 2.853425174561238e-05, |
| "loss": 0.5134, |
| "step": 6180 |
| }, |
| { |
| "epoch": 3.47, |
| "learning_rate": 2.8531421022834497e-05, |
| "loss": 0.5176, |
| "step": 6190 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 2.8528590300056616e-05, |
| "loss": 0.5752, |
| "step": 6200 |
| }, |
| { |
| "epoch": 3.48, |
| "learning_rate": 2.8525759577278732e-05, |
| "loss": 0.5022, |
| "step": 6210 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 2.852292885450085e-05, |
| "loss": 0.4861, |
| "step": 6220 |
| }, |
| { |
| "epoch": 3.49, |
| "learning_rate": 2.8520098131722967e-05, |
| "loss": 0.5055, |
| "step": 6230 |
| }, |
| { |
| "epoch": 3.5, |
| "learning_rate": 2.8517267408945083e-05, |
| "loss": 0.5075, |
| "step": 6240 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 2.8514436686167202e-05, |
| "loss": 0.5398, |
| "step": 6250 |
| }, |
| { |
| "epoch": 3.51, |
| "learning_rate": 2.8511605963389318e-05, |
| "loss": 0.5052, |
| "step": 6260 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 2.8508775240611437e-05, |
| "loss": 0.4957, |
| "step": 6270 |
| }, |
| { |
| "epoch": 3.52, |
| "learning_rate": 2.8505944517833553e-05, |
| "loss": 0.4966, |
| "step": 6280 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 2.8503113795055672e-05, |
| "loss": 0.5103, |
| "step": 6290 |
| }, |
| { |
| "epoch": 3.53, |
| "learning_rate": 2.8500283072277788e-05, |
| "loss": 0.5832, |
| "step": 6300 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 2.8497452349499907e-05, |
| "loss": 0.492, |
| "step": 6310 |
| }, |
| { |
| "epoch": 3.54, |
| "learning_rate": 2.8494621626722023e-05, |
| "loss": 0.5026, |
| "step": 6320 |
| }, |
| { |
| "epoch": 3.55, |
| "learning_rate": 2.849179090394414e-05, |
| "loss": 0.4916, |
| "step": 6330 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 2.8488960181166258e-05, |
| "loss": 0.517, |
| "step": 6340 |
| }, |
| { |
| "epoch": 3.56, |
| "learning_rate": 2.8486129458388377e-05, |
| "loss": 0.5344, |
| "step": 6350 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 2.8483298735610493e-05, |
| "loss": 0.4731, |
| "step": 6360 |
| }, |
| { |
| "epoch": 3.57, |
| "learning_rate": 2.848046801283261e-05, |
| "loss": 0.4811, |
| "step": 6370 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 2.8477637290054728e-05, |
| "loss": 0.4826, |
| "step": 6380 |
| }, |
| { |
| "epoch": 3.58, |
| "learning_rate": 2.8474806567276847e-05, |
| "loss": 0.5021, |
| "step": 6390 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 2.8471975844498963e-05, |
| "loss": 0.586, |
| "step": 6400 |
| }, |
| { |
| "epoch": 3.59, |
| "learning_rate": 2.846914512172108e-05, |
| "loss": 0.4972, |
| "step": 6410 |
| }, |
| { |
| "epoch": 3.6, |
| "learning_rate": 2.8466314398943198e-05, |
| "loss": 0.4858, |
| "step": 6420 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 2.8463483676165317e-05, |
| "loss": 0.4786, |
| "step": 6430 |
| }, |
| { |
| "epoch": 3.61, |
| "learning_rate": 2.8460652953387433e-05, |
| "loss": 0.5119, |
| "step": 6440 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 2.845782223060955e-05, |
| "loss": 0.5629, |
| "step": 6450 |
| }, |
| { |
| "epoch": 3.62, |
| "learning_rate": 2.8454991507831664e-05, |
| "loss": 0.5035, |
| "step": 6460 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 2.8452160785053787e-05, |
| "loss": 0.498, |
| "step": 6470 |
| }, |
| { |
| "epoch": 3.63, |
| "learning_rate": 2.8449330062275903e-05, |
| "loss": 0.4775, |
| "step": 6480 |
| }, |
| { |
| "epoch": 3.64, |
| "learning_rate": 2.844649933949802e-05, |
| "loss": 0.5032, |
| "step": 6490 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 2.8443668616720134e-05, |
| "loss": 0.5646, |
| "step": 6500 |
| }, |
| { |
| "epoch": 3.65, |
| "learning_rate": 2.8440837893942254e-05, |
| "loss": 0.4785, |
| "step": 6510 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 2.8438007171164373e-05, |
| "loss": 0.4954, |
| "step": 6520 |
| }, |
| { |
| "epoch": 3.66, |
| "learning_rate": 2.843517644838649e-05, |
| "loss": 0.491, |
| "step": 6530 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 2.8432345725608604e-05, |
| "loss": 0.4826, |
| "step": 6540 |
| }, |
| { |
| "epoch": 3.67, |
| "learning_rate": 2.8429515002830724e-05, |
| "loss": 0.5605, |
| "step": 6550 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 2.8426684280052843e-05, |
| "loss": 0.4978, |
| "step": 6560 |
| }, |
| { |
| "epoch": 3.68, |
| "learning_rate": 2.842385355727496e-05, |
| "loss": 0.4792, |
| "step": 6570 |
| }, |
| { |
| "epoch": 3.69, |
| "learning_rate": 2.8421022834497074e-05, |
| "loss": 0.4837, |
| "step": 6580 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 2.8418192111719194e-05, |
| "loss": 0.4792, |
| "step": 6590 |
| }, |
| { |
| "epoch": 3.7, |
| "learning_rate": 2.841536138894131e-05, |
| "loss": 0.5346, |
| "step": 6600 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 2.841253066616343e-05, |
| "loss": 0.4862, |
| "step": 6610 |
| }, |
| { |
| "epoch": 3.71, |
| "learning_rate": 2.8409699943385544e-05, |
| "loss": 0.4706, |
| "step": 6620 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 2.8406869220607664e-05, |
| "loss": 0.4802, |
| "step": 6630 |
| }, |
| { |
| "epoch": 3.72, |
| "learning_rate": 2.840403849782978e-05, |
| "loss": 0.5022, |
| "step": 6640 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 2.84012077750519e-05, |
| "loss": 0.5557, |
| "step": 6650 |
| }, |
| { |
| "epoch": 3.73, |
| "learning_rate": 2.8398377052274014e-05, |
| "loss": 0.4658, |
| "step": 6660 |
| }, |
| { |
| "epoch": 3.74, |
| "learning_rate": 2.8395546329496134e-05, |
| "loss": 0.4714, |
| "step": 6670 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 2.839271560671825e-05, |
| "loss": 0.4854, |
| "step": 6680 |
| }, |
| { |
| "epoch": 3.75, |
| "learning_rate": 2.838988488394037e-05, |
| "loss": 0.4855, |
| "step": 6690 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 2.838733723344027e-05, |
| "loss": 0.5495, |
| "step": 6700 |
| }, |
| { |
| "epoch": 3.76, |
| "learning_rate": 2.838450651066239e-05, |
| "loss": 0.4766, |
| "step": 6710 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.838167578788451e-05, |
| "loss": 0.4795, |
| "step": 6720 |
| }, |
| { |
| "epoch": 3.77, |
| "learning_rate": 2.8378845065106625e-05, |
| "loss": 0.4822, |
| "step": 6730 |
| }, |
| { |
| "epoch": 3.78, |
| "learning_rate": 2.837601434232874e-05, |
| "loss": 0.4787, |
| "step": 6740 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.837318361955086e-05, |
| "loss": 0.5247, |
| "step": 6750 |
| }, |
| { |
| "epoch": 3.79, |
| "learning_rate": 2.837035289677298e-05, |
| "loss": 0.4711, |
| "step": 6760 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.8367522173995095e-05, |
| "loss": 0.4798, |
| "step": 6770 |
| }, |
| { |
| "epoch": 3.8, |
| "learning_rate": 2.836469145121721e-05, |
| "loss": 0.4814, |
| "step": 6780 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.836186072843933e-05, |
| "loss": 0.5087, |
| "step": 6790 |
| }, |
| { |
| "epoch": 3.81, |
| "learning_rate": 2.8359030005661446e-05, |
| "loss": 0.5377, |
| "step": 6800 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.8356199282883565e-05, |
| "loss": 0.472, |
| "step": 6810 |
| }, |
| { |
| "epoch": 3.82, |
| "learning_rate": 2.835336856010568e-05, |
| "loss": 0.4858, |
| "step": 6820 |
| }, |
| { |
| "epoch": 3.83, |
| "learning_rate": 2.8350537837327796e-05, |
| "loss": 0.4741, |
| "step": 6830 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.8347707114549916e-05, |
| "loss": 0.4874, |
| "step": 6840 |
| }, |
| { |
| "epoch": 3.84, |
| "learning_rate": 2.8344876391772035e-05, |
| "loss": 0.5471, |
| "step": 6850 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.834204566899415e-05, |
| "loss": 0.4661, |
| "step": 6860 |
| }, |
| { |
| "epoch": 3.85, |
| "learning_rate": 2.8339214946216266e-05, |
| "loss": 0.4638, |
| "step": 6870 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.8336384223438386e-05, |
| "loss": 0.4642, |
| "step": 6880 |
| }, |
| { |
| "epoch": 3.86, |
| "learning_rate": 2.83335535006605e-05, |
| "loss": 0.4906, |
| "step": 6890 |
| }, |
| { |
| "epoch": 3.87, |
| "learning_rate": 2.833072277788262e-05, |
| "loss": 0.5481, |
| "step": 6900 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.8327892055104736e-05, |
| "loss": 0.4719, |
| "step": 6910 |
| }, |
| { |
| "epoch": 3.88, |
| "learning_rate": 2.8325061332326856e-05, |
| "loss": 0.4793, |
| "step": 6920 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.832223060954897e-05, |
| "loss": 0.4747, |
| "step": 6930 |
| }, |
| { |
| "epoch": 3.89, |
| "learning_rate": 2.831939988677109e-05, |
| "loss": 0.4771, |
| "step": 6940 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.8316569163993206e-05, |
| "loss": 0.525, |
| "step": 6950 |
| }, |
| { |
| "epoch": 3.9, |
| "learning_rate": 2.8313738441215326e-05, |
| "loss": 0.4757, |
| "step": 6960 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.831090771843744e-05, |
| "loss": 0.4587, |
| "step": 6970 |
| }, |
| { |
| "epoch": 3.91, |
| "learning_rate": 2.8308076995659557e-05, |
| "loss": 0.4628, |
| "step": 6980 |
| }, |
| { |
| "epoch": 3.92, |
| "learning_rate": 2.8305246272881676e-05, |
| "loss": 0.4797, |
| "step": 6990 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.8302415550103796e-05, |
| "loss": 0.5331, |
| "step": 7000 |
| }, |
| { |
| "epoch": 3.93, |
| "learning_rate": 2.829958482732591e-05, |
| "loss": 0.4724, |
| "step": 7010 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.8296754104548027e-05, |
| "loss": 0.4842, |
| "step": 7020 |
| }, |
| { |
| "epoch": 3.94, |
| "learning_rate": 2.8293923381770146e-05, |
| "loss": 0.4781, |
| "step": 7030 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.8291092658992266e-05, |
| "loss": 0.4768, |
| "step": 7040 |
| }, |
| { |
| "epoch": 3.95, |
| "learning_rate": 2.828826193621438e-05, |
| "loss": 0.5445, |
| "step": 7050 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.8285431213436497e-05, |
| "loss": 0.4642, |
| "step": 7060 |
| }, |
| { |
| "epoch": 3.96, |
| "learning_rate": 2.8282600490658613e-05, |
| "loss": 0.4747, |
| "step": 7070 |
| }, |
| { |
| "epoch": 3.97, |
| "learning_rate": 2.8279769767880736e-05, |
| "loss": 0.4664, |
| "step": 7080 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.827693904510285e-05, |
| "loss": 0.4758, |
| "step": 7090 |
| }, |
| { |
| "epoch": 3.98, |
| "learning_rate": 2.8274108322324967e-05, |
| "loss": 0.533, |
| "step": 7100 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.8271277599547083e-05, |
| "loss": 0.4736, |
| "step": 7110 |
| }, |
| { |
| "epoch": 3.99, |
| "learning_rate": 2.8268446876769205e-05, |
| "loss": 0.4604, |
| "step": 7120 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.826561615399132e-05, |
| "loss": 0.4956, |
| "step": 7130 |
| }, |
| { |
| "epoch": 4.0, |
| "eval_cer": 0.20729386911800468, |
| "eval_loss": 0.5549563765525818, |
| "eval_runtime": 311.5828, |
| "eval_samples_per_second": 17.084, |
| "eval_steps_per_second": 4.272, |
| "eval_wer": 0.2908813175198698, |
| "step": 7132 |
| }, |
| { |
| "epoch": 4.0, |
| "learning_rate": 2.8262785431213437e-05, |
| "loss": 0.5154, |
| "step": 7140 |
| }, |
| { |
| "epoch": 4.01, |
| "learning_rate": 2.8259954708435553e-05, |
| "loss": 0.4779, |
| "step": 7150 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 2.8257123985657672e-05, |
| "loss": 0.4423, |
| "step": 7160 |
| }, |
| { |
| "epoch": 4.02, |
| "learning_rate": 2.825429326287979e-05, |
| "loss": 0.4677, |
| "step": 7170 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 2.8251462540101907e-05, |
| "loss": 0.4874, |
| "step": 7180 |
| }, |
| { |
| "epoch": 4.03, |
| "learning_rate": 2.8248631817324023e-05, |
| "loss": 0.4821, |
| "step": 7190 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 2.8245801094546142e-05, |
| "loss": 0.4463, |
| "step": 7200 |
| }, |
| { |
| "epoch": 4.04, |
| "learning_rate": 2.824297037176826e-05, |
| "loss": 0.4495, |
| "step": 7210 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 2.8240139648990377e-05, |
| "loss": 0.4495, |
| "step": 7220 |
| }, |
| { |
| "epoch": 4.05, |
| "learning_rate": 2.8237308926212493e-05, |
| "loss": 0.496, |
| "step": 7230 |
| }, |
| { |
| "epoch": 4.06, |
| "learning_rate": 2.823447820343461e-05, |
| "loss": 0.4797, |
| "step": 7240 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 2.8231647480656728e-05, |
| "loss": 0.4512, |
| "step": 7250 |
| }, |
| { |
| "epoch": 4.07, |
| "learning_rate": 2.8228816757878847e-05, |
| "loss": 0.4532, |
| "step": 7260 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 2.8225986035100963e-05, |
| "loss": 0.454, |
| "step": 7270 |
| }, |
| { |
| "epoch": 4.08, |
| "learning_rate": 2.822315531232308e-05, |
| "loss": 0.4878, |
| "step": 7280 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 2.8220324589545198e-05, |
| "loss": 0.4851, |
| "step": 7290 |
| }, |
| { |
| "epoch": 4.09, |
| "learning_rate": 2.8217493866767317e-05, |
| "loss": 0.4582, |
| "step": 7300 |
| }, |
| { |
| "epoch": 4.1, |
| "learning_rate": 2.8214663143989433e-05, |
| "loss": 0.4438, |
| "step": 7310 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 2.821183242121155e-05, |
| "loss": 0.452, |
| "step": 7320 |
| }, |
| { |
| "epoch": 4.11, |
| "learning_rate": 2.8209001698433668e-05, |
| "loss": 0.4766, |
| "step": 7330 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 2.8206170975655784e-05, |
| "loss": 0.4583, |
| "step": 7340 |
| }, |
| { |
| "epoch": 4.12, |
| "learning_rate": 2.8203340252877903e-05, |
| "loss": 0.4489, |
| "step": 7350 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 2.820050953010002e-05, |
| "loss": 0.4514, |
| "step": 7360 |
| }, |
| { |
| "epoch": 4.13, |
| "learning_rate": 2.8197678807322138e-05, |
| "loss": 0.4509, |
| "step": 7370 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 2.8194848084544254e-05, |
| "loss": 0.49, |
| "step": 7380 |
| }, |
| { |
| "epoch": 4.14, |
| "learning_rate": 2.8192017361766373e-05, |
| "loss": 0.4661, |
| "step": 7390 |
| }, |
| { |
| "epoch": 4.15, |
| "learning_rate": 2.818918663898849e-05, |
| "loss": 0.447, |
| "step": 7400 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 2.8186355916210608e-05, |
| "loss": 0.4479, |
| "step": 7410 |
| }, |
| { |
| "epoch": 4.16, |
| "learning_rate": 2.8183525193432724e-05, |
| "loss": 0.4485, |
| "step": 7420 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 2.818069447065484e-05, |
| "loss": 0.481, |
| "step": 7430 |
| }, |
| { |
| "epoch": 4.17, |
| "learning_rate": 2.817786374787696e-05, |
| "loss": 0.4705, |
| "step": 7440 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 2.8175033025099078e-05, |
| "loss": 0.4519, |
| "step": 7450 |
| }, |
| { |
| "epoch": 4.18, |
| "learning_rate": 2.8172202302321194e-05, |
| "loss": 0.4532, |
| "step": 7460 |
| }, |
| { |
| "epoch": 4.19, |
| "learning_rate": 2.816937157954331e-05, |
| "loss": 0.4406, |
| "step": 7470 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 2.816654085676543e-05, |
| "loss": 0.4669, |
| "step": 7480 |
| }, |
| { |
| "epoch": 4.2, |
| "learning_rate": 2.8163710133987548e-05, |
| "loss": 0.4764, |
| "step": 7490 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 2.8160879411209664e-05, |
| "loss": 0.454, |
| "step": 7500 |
| }, |
| { |
| "epoch": 4.21, |
| "learning_rate": 2.815804868843178e-05, |
| "loss": 0.4431, |
| "step": 7510 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 2.8155217965653895e-05, |
| "loss": 0.4352, |
| "step": 7520 |
| }, |
| { |
| "epoch": 4.22, |
| "learning_rate": 2.8152387242876018e-05, |
| "loss": 0.488, |
| "step": 7530 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 2.8149556520098133e-05, |
| "loss": 0.4682, |
| "step": 7540 |
| }, |
| { |
| "epoch": 4.23, |
| "learning_rate": 2.814672579732025e-05, |
| "loss": 0.4662, |
| "step": 7550 |
| }, |
| { |
| "epoch": 4.24, |
| "learning_rate": 2.8143895074542365e-05, |
| "loss": 0.4424, |
| "step": 7560 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.8141064351764488e-05, |
| "loss": 0.447, |
| "step": 7570 |
| }, |
| { |
| "epoch": 4.25, |
| "learning_rate": 2.8138233628986603e-05, |
| "loss": 0.4747, |
| "step": 7580 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.813540290620872e-05, |
| "loss": 0.4629, |
| "step": 7590 |
| }, |
| { |
| "epoch": 4.26, |
| "learning_rate": 2.8132572183430835e-05, |
| "loss": 0.4383, |
| "step": 7600 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.8129741460652954e-05, |
| "loss": 0.4417, |
| "step": 7610 |
| }, |
| { |
| "epoch": 4.27, |
| "learning_rate": 2.8126910737875073e-05, |
| "loss": 0.452, |
| "step": 7620 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.812408001509719e-05, |
| "loss": 0.4675, |
| "step": 7630 |
| }, |
| { |
| "epoch": 4.28, |
| "learning_rate": 2.8121249292319305e-05, |
| "loss": 0.4647, |
| "step": 7640 |
| }, |
| { |
| "epoch": 4.29, |
| "learning_rate": 2.811841856954142e-05, |
| "loss": 0.4499, |
| "step": 7650 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.8115587846763543e-05, |
| "loss": 0.4471, |
| "step": 7660 |
| }, |
| { |
| "epoch": 4.3, |
| "learning_rate": 2.811275712398566e-05, |
| "loss": 0.4537, |
| "step": 7670 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.8109926401207775e-05, |
| "loss": 0.4788, |
| "step": 7680 |
| }, |
| { |
| "epoch": 4.31, |
| "learning_rate": 2.810709567842989e-05, |
| "loss": 0.4502, |
| "step": 7690 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.810426495565201e-05, |
| "loss": 0.4377, |
| "step": 7700 |
| }, |
| { |
| "epoch": 4.32, |
| "learning_rate": 2.810143423287413e-05, |
| "loss": 0.4487, |
| "step": 7710 |
| }, |
| { |
| "epoch": 4.33, |
| "learning_rate": 2.8098603510096245e-05, |
| "loss": 0.4453, |
| "step": 7720 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.809577278731836e-05, |
| "loss": 0.5011, |
| "step": 7730 |
| }, |
| { |
| "epoch": 4.34, |
| "learning_rate": 2.809294206454048e-05, |
| "loss": 0.467, |
| "step": 7740 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.80901113417626e-05, |
| "loss": 0.4451, |
| "step": 7750 |
| }, |
| { |
| "epoch": 4.35, |
| "learning_rate": 2.8087280618984715e-05, |
| "loss": 0.4419, |
| "step": 7760 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.808444989620683e-05, |
| "loss": 0.4614, |
| "step": 7770 |
| }, |
| { |
| "epoch": 4.36, |
| "learning_rate": 2.808161917342895e-05, |
| "loss": 0.4952, |
| "step": 7780 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.8078788450651066e-05, |
| "loss": 0.4676, |
| "step": 7790 |
| }, |
| { |
| "epoch": 4.37, |
| "learning_rate": 2.8075957727873185e-05, |
| "loss": 0.4554, |
| "step": 7800 |
| }, |
| { |
| "epoch": 4.38, |
| "learning_rate": 2.80731270050953e-05, |
| "loss": 0.44, |
| "step": 7810 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.807029628231742e-05, |
| "loss": 0.4507, |
| "step": 7820 |
| }, |
| { |
| "epoch": 4.39, |
| "learning_rate": 2.8067465559539536e-05, |
| "loss": 0.4736, |
| "step": 7830 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.8064634836761655e-05, |
| "loss": 0.4548, |
| "step": 7840 |
| }, |
| { |
| "epoch": 4.4, |
| "learning_rate": 2.806180411398377e-05, |
| "loss": 0.4343, |
| "step": 7850 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.805897339120589e-05, |
| "loss": 0.4436, |
| "step": 7860 |
| }, |
| { |
| "epoch": 4.41, |
| "learning_rate": 2.8056142668428006e-05, |
| "loss": 0.4524, |
| "step": 7870 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.805331194565012e-05, |
| "loss": 0.4795, |
| "step": 7880 |
| }, |
| { |
| "epoch": 4.42, |
| "learning_rate": 2.805048122287224e-05, |
| "loss": 0.4542, |
| "step": 7890 |
| }, |
| { |
| "epoch": 4.43, |
| "learning_rate": 2.804765050009436e-05, |
| "loss": 0.4278, |
| "step": 7900 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.8044819777316476e-05, |
| "loss": 0.432, |
| "step": 7910 |
| }, |
| { |
| "epoch": 4.44, |
| "learning_rate": 2.804198905453859e-05, |
| "loss": 0.4351, |
| "step": 7920 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.803915833176071e-05, |
| "loss": 0.4885, |
| "step": 7930 |
| }, |
| { |
| "epoch": 4.45, |
| "learning_rate": 2.803632760898283e-05, |
| "loss": 0.4617, |
| "step": 7940 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.8033496886204946e-05, |
| "loss": 0.4319, |
| "step": 7950 |
| }, |
| { |
| "epoch": 4.46, |
| "learning_rate": 2.803066616342706e-05, |
| "loss": 0.437, |
| "step": 7960 |
| }, |
| { |
| "epoch": 4.47, |
| "learning_rate": 2.8027835440649177e-05, |
| "loss": 0.4394, |
| "step": 7970 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.80250047178713e-05, |
| "loss": 0.4806, |
| "step": 7980 |
| }, |
| { |
| "epoch": 4.48, |
| "learning_rate": 2.8022173995093416e-05, |
| "loss": 0.4415, |
| "step": 7990 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.801934327231553e-05, |
| "loss": 0.4367, |
| "step": 8000 |
| }, |
| { |
| "epoch": 4.49, |
| "learning_rate": 2.8016512549537647e-05, |
| "loss": 0.4485, |
| "step": 8010 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.801368182675977e-05, |
| "loss": 0.4499, |
| "step": 8020 |
| }, |
| { |
| "epoch": 4.5, |
| "learning_rate": 2.8010851103981886e-05, |
| "loss": 0.463, |
| "step": 8030 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 2.8008020381204e-05, |
| "loss": 0.4574, |
| "step": 8040 |
| }, |
| { |
| "epoch": 4.51, |
| "learning_rate": 2.8005189658426117e-05, |
| "loss": 0.44, |
| "step": 8050 |
| }, |
| { |
| "epoch": 4.52, |
| "learning_rate": 2.8002358935648233e-05, |
| "loss": 0.4315, |
| "step": 8060 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.7999528212870356e-05, |
| "loss": 0.4489, |
| "step": 8070 |
| }, |
| { |
| "epoch": 4.53, |
| "learning_rate": 2.799669749009247e-05, |
| "loss": 0.4986, |
| "step": 8080 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.7993866767314587e-05, |
| "loss": 0.4663, |
| "step": 8090 |
| }, |
| { |
| "epoch": 4.54, |
| "learning_rate": 2.7991036044536703e-05, |
| "loss": 0.4351, |
| "step": 8100 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.7988205321758826e-05, |
| "loss": 0.4317, |
| "step": 8110 |
| }, |
| { |
| "epoch": 4.55, |
| "learning_rate": 2.798537459898094e-05, |
| "loss": 0.439, |
| "step": 8120 |
| }, |
| { |
| "epoch": 4.56, |
| "learning_rate": 2.7982543876203057e-05, |
| "loss": 0.4973, |
| "step": 8130 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.7979713153425173e-05, |
| "loss": 0.4811, |
| "step": 8140 |
| }, |
| { |
| "epoch": 4.57, |
| "learning_rate": 2.7976882430647292e-05, |
| "loss": 0.4474, |
| "step": 8150 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.797405170786941e-05, |
| "loss": 0.445, |
| "step": 8160 |
| }, |
| { |
| "epoch": 4.58, |
| "learning_rate": 2.7971220985091527e-05, |
| "loss": 0.4554, |
| "step": 8170 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7968390262313643e-05, |
| "loss": 0.4678, |
| "step": 8180 |
| }, |
| { |
| "epoch": 4.59, |
| "learning_rate": 2.7965559539535762e-05, |
| "loss": 0.4691, |
| "step": 8190 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.796272881675788e-05, |
| "loss": 0.43, |
| "step": 8200 |
| }, |
| { |
| "epoch": 4.6, |
| "learning_rate": 2.7959898093979997e-05, |
| "loss": 0.4402, |
| "step": 8210 |
| }, |
| { |
| "epoch": 4.61, |
| "learning_rate": 2.7957067371202113e-05, |
| "loss": 0.4356, |
| "step": 8220 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.7954236648424232e-05, |
| "loss": 0.4786, |
| "step": 8230 |
| }, |
| { |
| "epoch": 4.62, |
| "learning_rate": 2.7951405925646348e-05, |
| "loss": 0.4721, |
| "step": 8240 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7948575202868467e-05, |
| "loss": 0.4351, |
| "step": 8250 |
| }, |
| { |
| "epoch": 4.63, |
| "learning_rate": 2.7945744480090583e-05, |
| "loss": 0.4268, |
| "step": 8260 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7942913757312702e-05, |
| "loss": 0.4467, |
| "step": 8270 |
| }, |
| { |
| "epoch": 4.64, |
| "learning_rate": 2.7940083034534818e-05, |
| "loss": 0.476, |
| "step": 8280 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.7937252311756937e-05, |
| "loss": 0.4309, |
| "step": 8290 |
| }, |
| { |
| "epoch": 4.65, |
| "learning_rate": 2.7934421588979053e-05, |
| "loss": 0.4498, |
| "step": 8300 |
| }, |
| { |
| "epoch": 4.66, |
| "learning_rate": 2.7931590866201172e-05, |
| "loss": 0.4317, |
| "step": 8310 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.7928760143423288e-05, |
| "loss": 0.4455, |
| "step": 8320 |
| }, |
| { |
| "epoch": 4.67, |
| "learning_rate": 2.7925929420645404e-05, |
| "loss": 0.4597, |
| "step": 8330 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.7923098697867523e-05, |
| "loss": 0.4522, |
| "step": 8340 |
| }, |
| { |
| "epoch": 4.68, |
| "learning_rate": 2.7920267975089642e-05, |
| "loss": 0.4146, |
| "step": 8350 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.7917437252311758e-05, |
| "loss": 0.4359, |
| "step": 8360 |
| }, |
| { |
| "epoch": 4.69, |
| "learning_rate": 2.7914606529533874e-05, |
| "loss": 0.4223, |
| "step": 8370 |
| }, |
| { |
| "epoch": 4.7, |
| "learning_rate": 2.7911775806755993e-05, |
| "loss": 0.4567, |
| "step": 8380 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.7908945083978112e-05, |
| "loss": 0.4661, |
| "step": 8390 |
| }, |
| { |
| "epoch": 4.71, |
| "learning_rate": 2.7906114361200228e-05, |
| "loss": 0.4244, |
| "step": 8400 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.7903283638422344e-05, |
| "loss": 0.44, |
| "step": 8410 |
| }, |
| { |
| "epoch": 4.72, |
| "learning_rate": 2.790045291564446e-05, |
| "loss": 0.4301, |
| "step": 8420 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.7897622192866582e-05, |
| "loss": 0.4829, |
| "step": 8430 |
| }, |
| { |
| "epoch": 4.73, |
| "learning_rate": 2.7894791470088698e-05, |
| "loss": 0.4533, |
| "step": 8440 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.7891960747310814e-05, |
| "loss": 0.434, |
| "step": 8450 |
| }, |
| { |
| "epoch": 4.74, |
| "learning_rate": 2.788913002453293e-05, |
| "loss": 0.4112, |
| "step": 8460 |
| }, |
| { |
| "epoch": 4.75, |
| "learning_rate": 2.7886299301755052e-05, |
| "loss": 0.4387, |
| "step": 8470 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.7883468578977168e-05, |
| "loss": 0.4849, |
| "step": 8480 |
| }, |
| { |
| "epoch": 4.76, |
| "learning_rate": 2.7880637856199284e-05, |
| "loss": 0.4331, |
| "step": 8490 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.78778071334214e-05, |
| "loss": 0.4164, |
| "step": 8500 |
| }, |
| { |
| "epoch": 4.77, |
| "learning_rate": 2.7874976410643515e-05, |
| "loss": 0.4086, |
| "step": 8510 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.7872145687865638e-05, |
| "loss": 0.4276, |
| "step": 8520 |
| }, |
| { |
| "epoch": 4.78, |
| "learning_rate": 2.7869314965087754e-05, |
| "loss": 0.4671, |
| "step": 8530 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.786648424230987e-05, |
| "loss": 0.4535, |
| "step": 8540 |
| }, |
| { |
| "epoch": 4.79, |
| "learning_rate": 2.7863653519531985e-05, |
| "loss": 0.428, |
| "step": 8550 |
| }, |
| { |
| "epoch": 4.8, |
| "learning_rate": 2.7860822796754108e-05, |
| "loss": 0.4387, |
| "step": 8560 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.7857992073976224e-05, |
| "loss": 0.4344, |
| "step": 8570 |
| }, |
| { |
| "epoch": 4.81, |
| "learning_rate": 2.785516135119834e-05, |
| "loss": 0.4608, |
| "step": 8580 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 2.7852330628420455e-05, |
| "loss": 0.4641, |
| "step": 8590 |
| }, |
| { |
| "epoch": 4.82, |
| "learning_rate": 2.7849499905642574e-05, |
| "loss": 0.419, |
| "step": 8600 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 2.7846669182864694e-05, |
| "loss": 0.4183, |
| "step": 8610 |
| }, |
| { |
| "epoch": 4.83, |
| "learning_rate": 2.784383846008681e-05, |
| "loss": 0.4219, |
| "step": 8620 |
| }, |
| { |
| "epoch": 4.84, |
| "learning_rate": 2.7841007737308925e-05, |
| "loss": 0.4745, |
| "step": 8630 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 2.7838177014531044e-05, |
| "loss": 0.4371, |
| "step": 8640 |
| }, |
| { |
| "epoch": 4.85, |
| "learning_rate": 2.7835346291753163e-05, |
| "loss": 0.4179, |
| "step": 8650 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 2.783251556897528e-05, |
| "loss": 0.4372, |
| "step": 8660 |
| }, |
| { |
| "epoch": 4.86, |
| "learning_rate": 2.7829684846197395e-05, |
| "loss": 0.4367, |
| "step": 8670 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 2.7826854123419514e-05, |
| "loss": 0.4695, |
| "step": 8680 |
| }, |
| { |
| "epoch": 4.87, |
| "learning_rate": 2.782402340064163e-05, |
| "loss": 0.4492, |
| "step": 8690 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.782119267786375e-05, |
| "loss": 0.4253, |
| "step": 8700 |
| }, |
| { |
| "epoch": 4.88, |
| "learning_rate": 2.7818361955085865e-05, |
| "loss": 0.4137, |
| "step": 8710 |
| }, |
| { |
| "epoch": 4.89, |
| "learning_rate": 2.7815531232307984e-05, |
| "loss": 0.411, |
| "step": 8720 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.78127005095301e-05, |
| "loss": 0.4451, |
| "step": 8730 |
| }, |
| { |
| "epoch": 4.9, |
| "learning_rate": 2.780986978675222e-05, |
| "loss": 0.4384, |
| "step": 8740 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.7807039063974335e-05, |
| "loss": 0.4159, |
| "step": 8750 |
| }, |
| { |
| "epoch": 4.91, |
| "learning_rate": 2.7804208341196454e-05, |
| "loss": 0.4173, |
| "step": 8760 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.780137761841857e-05, |
| "loss": 0.4271, |
| "step": 8770 |
| }, |
| { |
| "epoch": 4.92, |
| "learning_rate": 2.7798546895640686e-05, |
| "loss": 0.4759, |
| "step": 8780 |
| }, |
| { |
| "epoch": 4.93, |
| "learning_rate": 2.7795716172862805e-05, |
| "loss": 0.4306, |
| "step": 8790 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.7792885450084924e-05, |
| "loss": 0.4192, |
| "step": 8800 |
| }, |
| { |
| "epoch": 4.94, |
| "learning_rate": 2.779005472730704e-05, |
| "loss": 0.4121, |
| "step": 8810 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.7787224004529156e-05, |
| "loss": 0.4231, |
| "step": 8820 |
| }, |
| { |
| "epoch": 4.95, |
| "learning_rate": 2.7784393281751275e-05, |
| "loss": 0.4415, |
| "step": 8830 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.7781562558973394e-05, |
| "loss": 0.4662, |
| "step": 8840 |
| }, |
| { |
| "epoch": 4.96, |
| "learning_rate": 2.777873183619551e-05, |
| "loss": 0.4296, |
| "step": 8850 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.7775901113417626e-05, |
| "loss": 0.4223, |
| "step": 8860 |
| }, |
| { |
| "epoch": 4.97, |
| "learning_rate": 2.777307039063974e-05, |
| "loss": 0.4061, |
| "step": 8870 |
| }, |
| { |
| "epoch": 4.98, |
| "learning_rate": 2.777052274013965e-05, |
| "loss": 0.4841, |
| "step": 8880 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.7767692017361766e-05, |
| "loss": 0.4521, |
| "step": 8890 |
| }, |
| { |
| "epoch": 4.99, |
| "learning_rate": 2.7764861294583886e-05, |
| "loss": 0.4141, |
| "step": 8900 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.7762030571806e-05, |
| "loss": 0.4291, |
| "step": 8910 |
| }, |
| { |
| "epoch": 5.0, |
| "eval_cer": 0.19871393858165692, |
| "eval_loss": 0.5059424042701721, |
| "eval_runtime": 313.4635, |
| "eval_samples_per_second": 16.981, |
| "eval_steps_per_second": 4.246, |
| "eval_wer": 0.27467786334201827, |
| "step": 8915 |
| }, |
| { |
| "epoch": 5.0, |
| "learning_rate": 2.775919984902812e-05, |
| "loss": 0.485, |
| "step": 8920 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 2.7756369126250236e-05, |
| "loss": 0.4057, |
| "step": 8930 |
| }, |
| { |
| "epoch": 5.01, |
| "learning_rate": 2.7753538403472352e-05, |
| "loss": 0.4006, |
| "step": 8940 |
| }, |
| { |
| "epoch": 5.02, |
| "learning_rate": 2.775070768069447e-05, |
| "loss": 0.4063, |
| "step": 8950 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 2.774787695791659e-05, |
| "loss": 0.4167, |
| "step": 8960 |
| }, |
| { |
| "epoch": 5.03, |
| "learning_rate": 2.7745046235138706e-05, |
| "loss": 0.4411, |
| "step": 8970 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 2.7742215512360822e-05, |
| "loss": 0.4192, |
| "step": 8980 |
| }, |
| { |
| "epoch": 5.04, |
| "learning_rate": 2.773938478958294e-05, |
| "loss": 0.4194, |
| "step": 8990 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 2.773655406680506e-05, |
| "loss": 0.4064, |
| "step": 9000 |
| }, |
| { |
| "epoch": 5.05, |
| "learning_rate": 2.7733723344027176e-05, |
| "loss": 0.4381, |
| "step": 9010 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 2.7730892621249292e-05, |
| "loss": 0.4143, |
| "step": 9020 |
| }, |
| { |
| "epoch": 5.06, |
| "learning_rate": 2.772806189847141e-05, |
| "loss": 0.4046, |
| "step": 9030 |
| }, |
| { |
| "epoch": 5.07, |
| "learning_rate": 2.7725231175693527e-05, |
| "loss": 0.4132, |
| "step": 9040 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 2.7722400452915646e-05, |
| "loss": 0.4046, |
| "step": 9050 |
| }, |
| { |
| "epoch": 5.08, |
| "learning_rate": 2.7719569730137762e-05, |
| "loss": 0.4442, |
| "step": 9060 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 2.7716739007359878e-05, |
| "loss": 0.4467, |
| "step": 9070 |
| }, |
| { |
| "epoch": 5.09, |
| "learning_rate": 2.7713908284581997e-05, |
| "loss": 0.4103, |
| "step": 9080 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 2.7711077561804116e-05, |
| "loss": 0.4149, |
| "step": 9090 |
| }, |
| { |
| "epoch": 5.1, |
| "learning_rate": 2.7708246839026232e-05, |
| "loss": 0.4086, |
| "step": 9100 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 2.7705416116248348e-05, |
| "loss": 0.4319, |
| "step": 9110 |
| }, |
| { |
| "epoch": 5.11, |
| "learning_rate": 2.7702585393470467e-05, |
| "loss": 0.4447, |
| "step": 9120 |
| }, |
| { |
| "epoch": 5.12, |
| "learning_rate": 2.7699754670692586e-05, |
| "loss": 0.4159, |
| "step": 9130 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 2.7696923947914702e-05, |
| "loss": 0.4012, |
| "step": 9140 |
| }, |
| { |
| "epoch": 5.13, |
| "learning_rate": 2.7694093225136818e-05, |
| "loss": 0.4139, |
| "step": 9150 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 2.7691262502358934e-05, |
| "loss": 0.4315, |
| "step": 9160 |
| }, |
| { |
| "epoch": 5.14, |
| "learning_rate": 2.7688431779581056e-05, |
| "loss": 0.4351, |
| "step": 9170 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 2.7685601056803172e-05, |
| "loss": 0.414, |
| "step": 9180 |
| }, |
| { |
| "epoch": 5.15, |
| "learning_rate": 2.7682770334025288e-05, |
| "loss": 0.4181, |
| "step": 9190 |
| }, |
| { |
| "epoch": 5.16, |
| "learning_rate": 2.7679939611247404e-05, |
| "loss": 0.4089, |
| "step": 9200 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 2.7677108888469523e-05, |
| "loss": 0.412, |
| "step": 9210 |
| }, |
| { |
| "epoch": 5.17, |
| "learning_rate": 2.7674278165691642e-05, |
| "loss": 0.4394, |
| "step": 9220 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 2.7671447442913758e-05, |
| "loss": 0.397, |
| "step": 9230 |
| }, |
| { |
| "epoch": 5.18, |
| "learning_rate": 2.7668616720135874e-05, |
| "loss": 0.3937, |
| "step": 9240 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 2.7665785997357993e-05, |
| "loss": 0.4115, |
| "step": 9250 |
| }, |
| { |
| "epoch": 5.19, |
| "learning_rate": 2.7662955274580112e-05, |
| "loss": 0.4281, |
| "step": 9260 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 2.7660124551802228e-05, |
| "loss": 0.4463, |
| "step": 9270 |
| }, |
| { |
| "epoch": 5.2, |
| "learning_rate": 2.7657293829024344e-05, |
| "loss": 0.3956, |
| "step": 9280 |
| }, |
| { |
| "epoch": 5.21, |
| "learning_rate": 2.7654463106246463e-05, |
| "loss": 0.3903, |
| "step": 9290 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 2.7651632383468582e-05, |
| "loss": 0.3975, |
| "step": 9300 |
| }, |
| { |
| "epoch": 5.22, |
| "learning_rate": 2.7648801660690698e-05, |
| "loss": 0.435, |
| "step": 9310 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 2.7645970937912814e-05, |
| "loss": 0.4515, |
| "step": 9320 |
| }, |
| { |
| "epoch": 5.23, |
| "learning_rate": 2.7643140215134933e-05, |
| "loss": 0.4026, |
| "step": 9330 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 2.764030949235705e-05, |
| "loss": 0.4131, |
| "step": 9340 |
| }, |
| { |
| "epoch": 5.24, |
| "learning_rate": 2.7637478769579168e-05, |
| "loss": 0.3948, |
| "step": 9350 |
| }, |
| { |
| "epoch": 5.25, |
| "learning_rate": 2.7634648046801284e-05, |
| "loss": 0.4267, |
| "step": 9360 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 2.7631817324023403e-05, |
| "loss": 0.4138, |
| "step": 9370 |
| }, |
| { |
| "epoch": 5.26, |
| "learning_rate": 2.762898660124552e-05, |
| "loss": 0.4024, |
| "step": 9380 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 2.7626155878467638e-05, |
| "loss": 0.3859, |
| "step": 9390 |
| }, |
| { |
| "epoch": 5.27, |
| "learning_rate": 2.7623325155689753e-05, |
| "loss": 0.4213, |
| "step": 9400 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 2.7620494432911873e-05, |
| "loss": 0.431, |
| "step": 9410 |
| }, |
| { |
| "epoch": 5.28, |
| "learning_rate": 2.761766371013399e-05, |
| "loss": 0.4711, |
| "step": 9420 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 2.7614832987356104e-05, |
| "loss": 0.4022, |
| "step": 9430 |
| }, |
| { |
| "epoch": 5.29, |
| "learning_rate": 2.7612002264578223e-05, |
| "loss": 0.3964, |
| "step": 9440 |
| }, |
| { |
| "epoch": 5.3, |
| "learning_rate": 2.760917154180034e-05, |
| "loss": 0.4145, |
| "step": 9450 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 2.760634081902246e-05, |
| "loss": 0.4187, |
| "step": 9460 |
| }, |
| { |
| "epoch": 5.31, |
| "learning_rate": 2.7603510096244574e-05, |
| "loss": 0.4423, |
| "step": 9470 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 2.7600679373466693e-05, |
| "loss": 0.4006, |
| "step": 9480 |
| }, |
| { |
| "epoch": 5.32, |
| "learning_rate": 2.759784865068881e-05, |
| "loss": 0.4094, |
| "step": 9490 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 2.759501792791093e-05, |
| "loss": 0.4167, |
| "step": 9500 |
| }, |
| { |
| "epoch": 5.33, |
| "learning_rate": 2.7592187205133044e-05, |
| "loss": 0.4328, |
| "step": 9510 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 2.758935648235516e-05, |
| "loss": 0.4272, |
| "step": 9520 |
| }, |
| { |
| "epoch": 5.34, |
| "learning_rate": 2.758652575957728e-05, |
| "loss": 0.4173, |
| "step": 9530 |
| }, |
| { |
| "epoch": 5.35, |
| "learning_rate": 2.75836950367994e-05, |
| "loss": 0.4001, |
| "step": 9540 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 2.7580864314021514e-05, |
| "loss": 0.4148, |
| "step": 9550 |
| }, |
| { |
| "epoch": 5.36, |
| "learning_rate": 2.757803359124363e-05, |
| "loss": 0.4193, |
| "step": 9560 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 2.757520286846575e-05, |
| "loss": 0.4436, |
| "step": 9570 |
| }, |
| { |
| "epoch": 5.37, |
| "learning_rate": 2.757237214568787e-05, |
| "loss": 0.4159, |
| "step": 9580 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 2.7569541422909984e-05, |
| "loss": 0.4144, |
| "step": 9590 |
| }, |
| { |
| "epoch": 5.38, |
| "learning_rate": 2.75667107001321e-05, |
| "loss": 0.4137, |
| "step": 9600 |
| }, |
| { |
| "epoch": 5.39, |
| "learning_rate": 2.7563879977354216e-05, |
| "loss": 0.4108, |
| "step": 9610 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 2.756104925457634e-05, |
| "loss": 0.4248, |
| "step": 9620 |
| }, |
| { |
| "epoch": 5.4, |
| "learning_rate": 2.7558218531798454e-05, |
| "loss": 0.3968, |
| "step": 9630 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 2.755538780902057e-05, |
| "loss": 0.4038, |
| "step": 9640 |
| }, |
| { |
| "epoch": 5.41, |
| "learning_rate": 2.7552557086242686e-05, |
| "loss": 0.4108, |
| "step": 9650 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 2.754972636346481e-05, |
| "loss": 0.4258, |
| "step": 9660 |
| }, |
| { |
| "epoch": 5.42, |
| "learning_rate": 2.7546895640686924e-05, |
| "loss": 0.4519, |
| "step": 9670 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 2.754406491790904e-05, |
| "loss": 0.4132, |
| "step": 9680 |
| }, |
| { |
| "epoch": 5.43, |
| "learning_rate": 2.7541234195131156e-05, |
| "loss": 0.4123, |
| "step": 9690 |
| }, |
| { |
| "epoch": 5.44, |
| "learning_rate": 2.7538403472353275e-05, |
| "loss": 0.4013, |
| "step": 9700 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 2.7535572749575394e-05, |
| "loss": 0.42, |
| "step": 9710 |
| }, |
| { |
| "epoch": 5.45, |
| "learning_rate": 2.753274202679751e-05, |
| "loss": 0.4051, |
| "step": 9720 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 2.7529911304019626e-05, |
| "loss": 0.3951, |
| "step": 9730 |
| }, |
| { |
| "epoch": 5.46, |
| "learning_rate": 2.7527080581241745e-05, |
| "loss": 0.3959, |
| "step": 9740 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 2.7524249858463864e-05, |
| "loss": 0.394, |
| "step": 9750 |
| }, |
| { |
| "epoch": 5.47, |
| "learning_rate": 2.752141913568598e-05, |
| "loss": 0.4166, |
| "step": 9760 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 2.7518588412908096e-05, |
| "loss": 0.4235, |
| "step": 9770 |
| }, |
| { |
| "epoch": 5.48, |
| "learning_rate": 2.7515757690130215e-05, |
| "loss": 0.3889, |
| "step": 9780 |
| }, |
| { |
| "epoch": 5.49, |
| "learning_rate": 2.751292696735233e-05, |
| "loss": 0.3918, |
| "step": 9790 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 2.751009624457445e-05, |
| "loss": 0.3981, |
| "step": 9800 |
| }, |
| { |
| "epoch": 5.5, |
| "learning_rate": 2.7507265521796566e-05, |
| "loss": 0.4059, |
| "step": 9810 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 2.7504717871296475e-05, |
| "loss": 0.4369, |
| "step": 9820 |
| }, |
| { |
| "epoch": 5.51, |
| "learning_rate": 2.750188714851859e-05, |
| "loss": 0.3976, |
| "step": 9830 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 2.7499056425740706e-05, |
| "loss": 0.4042, |
| "step": 9840 |
| }, |
| { |
| "epoch": 5.52, |
| "learning_rate": 2.7496225702962822e-05, |
| "loss": 0.3924, |
| "step": 9850 |
| }, |
| { |
| "epoch": 5.53, |
| "learning_rate": 2.749339498018494e-05, |
| "loss": 0.4056, |
| "step": 9860 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 2.749056425740706e-05, |
| "loss": 0.4329, |
| "step": 9870 |
| }, |
| { |
| "epoch": 5.54, |
| "learning_rate": 2.7487733534629176e-05, |
| "loss": 0.3983, |
| "step": 9880 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.7484902811851292e-05, |
| "loss": 0.3905, |
| "step": 9890 |
| }, |
| { |
| "epoch": 5.55, |
| "learning_rate": 2.748207208907341e-05, |
| "loss": 0.3964, |
| "step": 9900 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.747924136629553e-05, |
| "loss": 0.4152, |
| "step": 9910 |
| }, |
| { |
| "epoch": 5.56, |
| "learning_rate": 2.7476410643517646e-05, |
| "loss": 0.4408, |
| "step": 9920 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.7473579920739762e-05, |
| "loss": 0.397, |
| "step": 9930 |
| }, |
| { |
| "epoch": 5.57, |
| "learning_rate": 2.747074919796188e-05, |
| "loss": 0.3947, |
| "step": 9940 |
| }, |
| { |
| "epoch": 5.58, |
| "learning_rate": 2.7467918475183997e-05, |
| "loss": 0.3929, |
| "step": 9950 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.7465087752406116e-05, |
| "loss": 0.4233, |
| "step": 9960 |
| }, |
| { |
| "epoch": 5.59, |
| "learning_rate": 2.7462257029628232e-05, |
| "loss": 0.4108, |
| "step": 9970 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.7459426306850348e-05, |
| "loss": 0.3853, |
| "step": 9980 |
| }, |
| { |
| "epoch": 5.6, |
| "learning_rate": 2.7456595584072467e-05, |
| "loss": 0.3962, |
| "step": 9990 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.7453764861294586e-05, |
| "loss": 0.389, |
| "step": 10000 |
| }, |
| { |
| "epoch": 5.61, |
| "learning_rate": 2.7450934138516702e-05, |
| "loss": 0.4295, |
| "step": 10010 |
| }, |
| { |
| "epoch": 5.62, |
| "learning_rate": 2.7448103415738818e-05, |
| "loss": 0.4272, |
| "step": 10020 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.7445272692960937e-05, |
| "loss": 0.389, |
| "step": 10030 |
| }, |
| { |
| "epoch": 5.63, |
| "learning_rate": 2.7442441970183053e-05, |
| "loss": 0.3932, |
| "step": 10040 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 2.7439611247405172e-05, |
| "loss": 0.3895, |
| "step": 10050 |
| }, |
| { |
| "epoch": 5.64, |
| "learning_rate": 2.7436780524627288e-05, |
| "loss": 0.4153, |
| "step": 10060 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 2.7433949801849407e-05, |
| "loss": 0.453, |
| "step": 10070 |
| }, |
| { |
| "epoch": 5.65, |
| "learning_rate": 2.7431119079071523e-05, |
| "loss": 0.3965, |
| "step": 10080 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 2.7428288356293642e-05, |
| "loss": 0.391, |
| "step": 10090 |
| }, |
| { |
| "epoch": 5.66, |
| "learning_rate": 2.7425457633515758e-05, |
| "loss": 0.3975, |
| "step": 10100 |
| }, |
| { |
| "epoch": 5.67, |
| "learning_rate": 2.7422626910737877e-05, |
| "loss": 0.4228, |
| "step": 10110 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 2.7419796187959993e-05, |
| "loss": 0.4331, |
| "step": 10120 |
| }, |
| { |
| "epoch": 5.68, |
| "learning_rate": 2.741696546518211e-05, |
| "loss": 0.3947, |
| "step": 10130 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 2.7414134742404228e-05, |
| "loss": 0.3861, |
| "step": 10140 |
| }, |
| { |
| "epoch": 5.69, |
| "learning_rate": 2.7411304019626347e-05, |
| "loss": 0.3963, |
| "step": 10150 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 2.7408473296848463e-05, |
| "loss": 0.4255, |
| "step": 10160 |
| }, |
| { |
| "epoch": 5.7, |
| "learning_rate": 2.740564257407058e-05, |
| "loss": 0.4248, |
| "step": 10170 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 2.7402811851292698e-05, |
| "loss": 0.405, |
| "step": 10180 |
| }, |
| { |
| "epoch": 5.71, |
| "learning_rate": 2.7399981128514817e-05, |
| "loss": 0.3738, |
| "step": 10190 |
| }, |
| { |
| "epoch": 5.72, |
| "learning_rate": 2.7397150405736933e-05, |
| "loss": 0.3934, |
| "step": 10200 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 2.739431968295905e-05, |
| "loss": 0.4119, |
| "step": 10210 |
| }, |
| { |
| "epoch": 5.73, |
| "learning_rate": 2.7391488960181164e-05, |
| "loss": 0.4365, |
| "step": 10220 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 2.7388658237403287e-05, |
| "loss": 0.3953, |
| "step": 10230 |
| }, |
| { |
| "epoch": 5.74, |
| "learning_rate": 2.7385827514625403e-05, |
| "loss": 0.3921, |
| "step": 10240 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 2.738299679184752e-05, |
| "loss": 0.3886, |
| "step": 10250 |
| }, |
| { |
| "epoch": 5.75, |
| "learning_rate": 2.7380166069069634e-05, |
| "loss": 0.4, |
| "step": 10260 |
| }, |
| { |
| "epoch": 5.76, |
| "learning_rate": 2.7377335346291757e-05, |
| "loss": 0.4101, |
| "step": 10270 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 2.7374504623513873e-05, |
| "loss": 0.3922, |
| "step": 10280 |
| }, |
| { |
| "epoch": 5.77, |
| "learning_rate": 2.737167390073599e-05, |
| "loss": 0.3909, |
| "step": 10290 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 2.7368843177958104e-05, |
| "loss": 0.3933, |
| "step": 10300 |
| }, |
| { |
| "epoch": 5.78, |
| "learning_rate": 2.7366012455180223e-05, |
| "loss": 0.4048, |
| "step": 10310 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 2.7363181732402343e-05, |
| "loss": 0.4303, |
| "step": 10320 |
| }, |
| { |
| "epoch": 5.79, |
| "learning_rate": 2.736035100962446e-05, |
| "loss": 0.3846, |
| "step": 10330 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 2.7357520286846574e-05, |
| "loss": 0.3944, |
| "step": 10340 |
| }, |
| { |
| "epoch": 5.8, |
| "learning_rate": 2.7354689564068693e-05, |
| "loss": 0.3912, |
| "step": 10350 |
| }, |
| { |
| "epoch": 5.81, |
| "learning_rate": 2.7351858841290813e-05, |
| "loss": 0.3997, |
| "step": 10360 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 2.734902811851293e-05, |
| "loss": 0.4447, |
| "step": 10370 |
| }, |
| { |
| "epoch": 5.82, |
| "learning_rate": 2.7346197395735044e-05, |
| "loss": 0.397, |
| "step": 10380 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 2.7343366672957163e-05, |
| "loss": 0.4039, |
| "step": 10390 |
| }, |
| { |
| "epoch": 5.83, |
| "learning_rate": 2.734053595017928e-05, |
| "loss": 0.3969, |
| "step": 10400 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 2.73377052274014e-05, |
| "loss": 0.4115, |
| "step": 10410 |
| }, |
| { |
| "epoch": 5.84, |
| "learning_rate": 2.7334874504623514e-05, |
| "loss": 0.4189, |
| "step": 10420 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 2.733204378184563e-05, |
| "loss": 0.3892, |
| "step": 10430 |
| }, |
| { |
| "epoch": 5.85, |
| "learning_rate": 2.732921305906775e-05, |
| "loss": 0.4107, |
| "step": 10440 |
| }, |
| { |
| "epoch": 5.86, |
| "learning_rate": 2.732638233628987e-05, |
| "loss": 0.3895, |
| "step": 10450 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.7323551613511984e-05, |
| "loss": 0.4109, |
| "step": 10460 |
| }, |
| { |
| "epoch": 5.87, |
| "learning_rate": 2.73207208907341e-05, |
| "loss": 0.4423, |
| "step": 10470 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.731789016795622e-05, |
| "loss": 0.3871, |
| "step": 10480 |
| }, |
| { |
| "epoch": 5.88, |
| "learning_rate": 2.7315059445178335e-05, |
| "loss": 0.3916, |
| "step": 10490 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 2.7312228722400454e-05, |
| "loss": 0.3943, |
| "step": 10500 |
| }, |
| { |
| "epoch": 5.89, |
| "learning_rate": 2.730939799962257e-05, |
| "loss": 0.4127, |
| "step": 10510 |
| }, |
| { |
| "epoch": 5.9, |
| "learning_rate": 2.730656727684469e-05, |
| "loss": 0.4276, |
| "step": 10520 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 2.7303736554066805e-05, |
| "loss": 0.3883, |
| "step": 10530 |
| }, |
| { |
| "epoch": 5.91, |
| "learning_rate": 2.7300905831288924e-05, |
| "loss": 0.4064, |
| "step": 10540 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 2.729807510851104e-05, |
| "loss": 0.3801, |
| "step": 10550 |
| }, |
| { |
| "epoch": 5.92, |
| "learning_rate": 2.729524438573316e-05, |
| "loss": 0.4265, |
| "step": 10560 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 2.7292413662955275e-05, |
| "loss": 0.4103, |
| "step": 10570 |
| }, |
| { |
| "epoch": 5.93, |
| "learning_rate": 2.728958294017739e-05, |
| "loss": 0.3911, |
| "step": 10580 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 2.728675221739951e-05, |
| "loss": 0.3778, |
| "step": 10590 |
| }, |
| { |
| "epoch": 5.94, |
| "learning_rate": 2.728392149462163e-05, |
| "loss": 0.39, |
| "step": 10600 |
| }, |
| { |
| "epoch": 5.95, |
| "learning_rate": 2.7281090771843745e-05, |
| "loss": 0.4157, |
| "step": 10610 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.727826004906586e-05, |
| "loss": 0.4392, |
| "step": 10620 |
| }, |
| { |
| "epoch": 5.96, |
| "learning_rate": 2.727542932628798e-05, |
| "loss": 0.3881, |
| "step": 10630 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 2.72725986035101e-05, |
| "loss": 0.3937, |
| "step": 10640 |
| }, |
| { |
| "epoch": 5.97, |
| "learning_rate": 2.7269767880732215e-05, |
| "loss": 0.3856, |
| "step": 10650 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 2.726693715795433e-05, |
| "loss": 0.4084, |
| "step": 10660 |
| }, |
| { |
| "epoch": 5.98, |
| "learning_rate": 2.7264106435176446e-05, |
| "loss": 0.4141, |
| "step": 10670 |
| }, |
| { |
| "epoch": 5.99, |
| "learning_rate": 2.726127571239857e-05, |
| "loss": 0.389, |
| "step": 10680 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 2.7258444989620685e-05, |
| "loss": 0.4025, |
| "step": 10690 |
| }, |
| { |
| "epoch": 6.0, |
| "eval_cer": 0.18394316677773673, |
| "eval_loss": 0.47715499997138977, |
| "eval_runtime": 311.6581, |
| "eval_samples_per_second": 17.08, |
| "eval_steps_per_second": 4.271, |
| "eval_wer": 0.2581746860137084, |
| "step": 10698 |
| }, |
| { |
| "epoch": 6.0, |
| "learning_rate": 2.72556142668428e-05, |
| "loss": 0.442, |
| "step": 10700 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 2.7252783544064916e-05, |
| "loss": 0.3762, |
| "step": 10710 |
| }, |
| { |
| "epoch": 6.01, |
| "learning_rate": 2.724995282128704e-05, |
| "loss": 0.3647, |
| "step": 10720 |
| }, |
| { |
| "epoch": 6.02, |
| "learning_rate": 2.7247122098509155e-05, |
| "loss": 0.3779, |
| "step": 10730 |
| }, |
| { |
| "epoch": 6.02, |
| "learning_rate": 2.724429137573127e-05, |
| "loss": 0.3814, |
| "step": 10740 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 2.7241460652953386e-05, |
| "loss": 0.3956, |
| "step": 10750 |
| }, |
| { |
| "epoch": 6.03, |
| "learning_rate": 2.7238629930175506e-05, |
| "loss": 0.3657, |
| "step": 10760 |
| }, |
| { |
| "epoch": 6.04, |
| "learning_rate": 2.7235799207397625e-05, |
| "loss": 0.3657, |
| "step": 10770 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 2.723296848461974e-05, |
| "loss": 0.3698, |
| "step": 10780 |
| }, |
| { |
| "epoch": 6.05, |
| "learning_rate": 2.7230137761841856e-05, |
| "loss": 0.3859, |
| "step": 10790 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 2.7227307039063976e-05, |
| "loss": 0.4125, |
| "step": 10800 |
| }, |
| { |
| "epoch": 6.06, |
| "learning_rate": 2.7224476316286095e-05, |
| "loss": 0.3948, |
| "step": 10810 |
| }, |
| { |
| "epoch": 6.07, |
| "learning_rate": 2.722164559350821e-05, |
| "loss": 0.3704, |
| "step": 10820 |
| }, |
| { |
| "epoch": 6.07, |
| "learning_rate": 2.7218814870730326e-05, |
| "loss": 0.3932, |
| "step": 10830 |
| }, |
| { |
| "epoch": 6.08, |
| "learning_rate": 2.7215984147952442e-05, |
| "loss": 0.3721, |
| "step": 10840 |
| }, |
| { |
| "epoch": 6.09, |
| "learning_rate": 2.721315342517456e-05, |
| "loss": 0.4063, |
| "step": 10850 |
| }, |
| { |
| "epoch": 6.09, |
| "learning_rate": 2.721032270239668e-05, |
| "loss": 0.3709, |
| "step": 10860 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 2.7207491979618796e-05, |
| "loss": 0.3747, |
| "step": 10870 |
| }, |
| { |
| "epoch": 6.1, |
| "learning_rate": 2.7204661256840912e-05, |
| "loss": 0.3794, |
| "step": 10880 |
| }, |
| { |
| "epoch": 6.11, |
| "learning_rate": 2.720183053406303e-05, |
| "loss": 0.3888, |
| "step": 10890 |
| }, |
| { |
| "epoch": 6.11, |
| "learning_rate": 2.719899981128515e-05, |
| "loss": 0.4038, |
| "step": 10900 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 2.7196169088507266e-05, |
| "loss": 0.3777, |
| "step": 10910 |
| }, |
| { |
| "epoch": 6.12, |
| "learning_rate": 2.7193338365729382e-05, |
| "loss": 0.3782, |
| "step": 10920 |
| }, |
| { |
| "epoch": 6.13, |
| "learning_rate": 2.71905076429515e-05, |
| "loss": 0.3843, |
| "step": 10930 |
| }, |
| { |
| "epoch": 6.14, |
| "learning_rate": 2.7187676920173617e-05, |
| "loss": 0.3999, |
| "step": 10940 |
| }, |
| { |
| "epoch": 6.14, |
| "learning_rate": 2.7184846197395736e-05, |
| "loss": 0.4224, |
| "step": 10950 |
| }, |
| { |
| "epoch": 6.15, |
| "learning_rate": 2.7182015474617852e-05, |
| "loss": 0.3842, |
| "step": 10960 |
| }, |
| { |
| "epoch": 6.15, |
| "learning_rate": 2.717918475183997e-05, |
| "loss": 0.3783, |
| "step": 10970 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 2.7176354029062087e-05, |
| "loss": 0.3637, |
| "step": 10980 |
| }, |
| { |
| "epoch": 6.16, |
| "learning_rate": 2.7173523306284206e-05, |
| "loss": 0.3855, |
| "step": 10990 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 2.7170692583506322e-05, |
| "loss": 0.4101, |
| "step": 11000 |
| }, |
| { |
| "epoch": 6.17, |
| "learning_rate": 2.716786186072844e-05, |
| "loss": 0.3824, |
| "step": 11010 |
| }, |
| { |
| "epoch": 6.18, |
| "learning_rate": 2.7165031137950557e-05, |
| "loss": 0.3701, |
| "step": 11020 |
| }, |
| { |
| "epoch": 6.19, |
| "learning_rate": 2.7162200415172673e-05, |
| "loss": 0.3718, |
| "step": 11030 |
| }, |
| { |
| "epoch": 6.19, |
| "learning_rate": 2.7159369692394792e-05, |
| "loss": 0.4079, |
| "step": 11040 |
| }, |
| { |
| "epoch": 6.2, |
| "learning_rate": 2.715653896961691e-05, |
| "loss": 0.4152, |
| "step": 11050 |
| }, |
| { |
| "epoch": 6.2, |
| "learning_rate": 2.7153708246839027e-05, |
| "loss": 0.385, |
| "step": 11060 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 2.7150877524061143e-05, |
| "loss": 0.3868, |
| "step": 11070 |
| }, |
| { |
| "epoch": 6.21, |
| "learning_rate": 2.7148046801283262e-05, |
| "loss": 0.3846, |
| "step": 11080 |
| }, |
| { |
| "epoch": 6.22, |
| "learning_rate": 2.714521607850538e-05, |
| "loss": 0.3877, |
| "step": 11090 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 2.7142385355727497e-05, |
| "loss": 0.4156, |
| "step": 11100 |
| }, |
| { |
| "epoch": 6.23, |
| "learning_rate": 2.7139554632949613e-05, |
| "loss": 0.3668, |
| "step": 11110 |
| }, |
| { |
| "epoch": 6.24, |
| "learning_rate": 2.713672391017173e-05, |
| "loss": 0.384, |
| "step": 11120 |
| }, |
| { |
| "epoch": 6.24, |
| "learning_rate": 2.713389318739385e-05, |
| "loss": 0.3722, |
| "step": 11130 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 2.7131062464615967e-05, |
| "loss": 0.3836, |
| "step": 11140 |
| }, |
| { |
| "epoch": 6.25, |
| "learning_rate": 2.7128231741838083e-05, |
| "loss": 0.3949, |
| "step": 11150 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 2.71254010190602e-05, |
| "loss": 0.387, |
| "step": 11160 |
| }, |
| { |
| "epoch": 6.26, |
| "learning_rate": 2.712257029628232e-05, |
| "loss": 0.3773, |
| "step": 11170 |
| }, |
| { |
| "epoch": 6.27, |
| "learning_rate": 2.7119739573504437e-05, |
| "loss": 0.3749, |
| "step": 11180 |
| }, |
| { |
| "epoch": 6.28, |
| "learning_rate": 2.7116908850726553e-05, |
| "loss": 0.3863, |
| "step": 11190 |
| }, |
| { |
| "epoch": 6.28, |
| "learning_rate": 2.711407812794867e-05, |
| "loss": 0.4273, |
| "step": 11200 |
| }, |
| { |
| "epoch": 6.29, |
| "learning_rate": 2.7111247405170788e-05, |
| "loss": 0.3846, |
| "step": 11210 |
| }, |
| { |
| "epoch": 6.29, |
| "learning_rate": 2.7108416682392907e-05, |
| "loss": 0.3729, |
| "step": 11220 |
| }, |
| { |
| "epoch": 6.3, |
| "learning_rate": 2.7105585959615023e-05, |
| "loss": 0.3737, |
| "step": 11230 |
| }, |
| { |
| "epoch": 6.3, |
| "learning_rate": 2.710275523683714e-05, |
| "loss": 0.3936, |
| "step": 11240 |
| }, |
| { |
| "epoch": 6.31, |
| "learning_rate": 2.7099924514059254e-05, |
| "loss": 0.4225, |
| "step": 11250 |
| }, |
| { |
| "epoch": 6.32, |
| "learning_rate": 2.7097093791281377e-05, |
| "loss": 0.3692, |
| "step": 11260 |
| }, |
| { |
| "epoch": 6.32, |
| "learning_rate": 2.7094263068503493e-05, |
| "loss": 0.3743, |
| "step": 11270 |
| }, |
| { |
| "epoch": 6.33, |
| "learning_rate": 2.709143234572561e-05, |
| "loss": 0.3739, |
| "step": 11280 |
| }, |
| { |
| "epoch": 6.33, |
| "learning_rate": 2.7088601622947724e-05, |
| "loss": 0.3886, |
| "step": 11290 |
| }, |
| { |
| "epoch": 6.34, |
| "learning_rate": 2.7085770900169843e-05, |
| "loss": 0.4193, |
| "step": 11300 |
| }, |
| { |
| "epoch": 6.34, |
| "learning_rate": 2.7082940177391963e-05, |
| "loss": 0.3494, |
| "step": 11310 |
| }, |
| { |
| "epoch": 6.35, |
| "learning_rate": 2.708010945461408e-05, |
| "loss": 0.38, |
| "step": 11320 |
| }, |
| { |
| "epoch": 6.35, |
| "learning_rate": 2.7077278731836194e-05, |
| "loss": 0.3799, |
| "step": 11330 |
| }, |
| { |
| "epoch": 6.36, |
| "learning_rate": 2.7074448009058313e-05, |
| "loss": 0.3865, |
| "step": 11340 |
| }, |
| { |
| "epoch": 6.37, |
| "learning_rate": 2.7071617286280433e-05, |
| "loss": 0.4189, |
| "step": 11350 |
| }, |
| { |
| "epoch": 6.37, |
| "learning_rate": 2.706878656350255e-05, |
| "loss": 0.3858, |
| "step": 11360 |
| }, |
| { |
| "epoch": 6.38, |
| "learning_rate": 2.7065955840724664e-05, |
| "loss": 0.3705, |
| "step": 11370 |
| }, |
| { |
| "epoch": 6.38, |
| "learning_rate": 2.7063125117946783e-05, |
| "loss": 0.3697, |
| "step": 11380 |
| }, |
| { |
| "epoch": 6.39, |
| "learning_rate": 2.70602943951689e-05, |
| "loss": 0.3872, |
| "step": 11390 |
| }, |
| { |
| "epoch": 6.39, |
| "learning_rate": 2.705746367239102e-05, |
| "loss": 0.4224, |
| "step": 11400 |
| }, |
| { |
| "epoch": 6.4, |
| "learning_rate": 2.7054632949613134e-05, |
| "loss": 0.3816, |
| "step": 11410 |
| }, |
| { |
| "epoch": 6.4, |
| "learning_rate": 2.7051802226835253e-05, |
| "loss": 0.3628, |
| "step": 11420 |
| }, |
| { |
| "epoch": 6.41, |
| "learning_rate": 2.704897150405737e-05, |
| "loss": 0.3752, |
| "step": 11430 |
| }, |
| { |
| "epoch": 6.42, |
| "learning_rate": 2.704614078127949e-05, |
| "loss": 0.387, |
| "step": 11440 |
| }, |
| { |
| "epoch": 6.42, |
| "learning_rate": 2.7043310058501604e-05, |
| "loss": 0.4164, |
| "step": 11450 |
| }, |
| { |
| "epoch": 6.43, |
| "learning_rate": 2.7040479335723723e-05, |
| "loss": 0.3797, |
| "step": 11460 |
| }, |
| { |
| "epoch": 6.43, |
| "learning_rate": 2.703764861294584e-05, |
| "loss": 0.367, |
| "step": 11470 |
| }, |
| { |
| "epoch": 6.44, |
| "learning_rate": 2.7034817890167955e-05, |
| "loss": 0.3735, |
| "step": 11480 |
| }, |
| { |
| "epoch": 6.44, |
| "learning_rate": 2.7031987167390074e-05, |
| "loss": 0.3716, |
| "step": 11490 |
| }, |
| { |
| "epoch": 6.45, |
| "learning_rate": 2.7029156444612193e-05, |
| "loss": 0.4151, |
| "step": 11500 |
| }, |
| { |
| "epoch": 6.46, |
| "learning_rate": 2.702632572183431e-05, |
| "loss": 0.376, |
| "step": 11510 |
| }, |
| { |
| "epoch": 6.46, |
| "learning_rate": 2.7023494999056425e-05, |
| "loss": 0.3706, |
| "step": 11520 |
| }, |
| { |
| "epoch": 6.47, |
| "learning_rate": 2.7020664276278544e-05, |
| "loss": 0.3489, |
| "step": 11530 |
| }, |
| { |
| "epoch": 6.47, |
| "learning_rate": 2.7017833553500663e-05, |
| "loss": 0.3776, |
| "step": 11540 |
| }, |
| { |
| "epoch": 6.48, |
| "learning_rate": 2.701500283072278e-05, |
| "loss": 0.4213, |
| "step": 11550 |
| }, |
| { |
| "epoch": 6.48, |
| "learning_rate": 2.7012172107944895e-05, |
| "loss": 0.3818, |
| "step": 11560 |
| }, |
| { |
| "epoch": 6.49, |
| "learning_rate": 2.700934138516701e-05, |
| "loss": 0.3798, |
| "step": 11570 |
| }, |
| { |
| "epoch": 6.49, |
| "learning_rate": 2.7006510662389133e-05, |
| "loss": 0.3707, |
| "step": 11580 |
| }, |
| { |
| "epoch": 6.5, |
| "learning_rate": 2.700367993961125e-05, |
| "loss": 0.3872, |
| "step": 11590 |
| }, |
| { |
| "epoch": 6.51, |
| "learning_rate": 2.7000849216833365e-05, |
| "loss": 0.4036, |
| "step": 11600 |
| }, |
| { |
| "epoch": 6.51, |
| "learning_rate": 2.699801849405548e-05, |
| "loss": 0.3717, |
| "step": 11610 |
| }, |
| { |
| "epoch": 6.52, |
| "learning_rate": 2.6995187771277603e-05, |
| "loss": 0.3662, |
| "step": 11620 |
| }, |
| { |
| "epoch": 6.52, |
| "learning_rate": 2.699235704849972e-05, |
| "loss": 0.3625, |
| "step": 11630 |
| }, |
| { |
| "epoch": 6.53, |
| "learning_rate": 2.6989526325721835e-05, |
| "loss": 0.37, |
| "step": 11640 |
| }, |
| { |
| "epoch": 6.53, |
| "learning_rate": 2.698669560294395e-05, |
| "loss": 0.3965, |
| "step": 11650 |
| }, |
| { |
| "epoch": 6.54, |
| "learning_rate": 2.6983864880166067e-05, |
| "loss": 0.3619, |
| "step": 11660 |
| }, |
| { |
| "epoch": 6.54, |
| "learning_rate": 2.698103415738819e-05, |
| "loss": 0.3791, |
| "step": 11670 |
| }, |
| { |
| "epoch": 6.55, |
| "learning_rate": 2.6978203434610305e-05, |
| "loss": 0.3674, |
| "step": 11680 |
| }, |
| { |
| "epoch": 6.56, |
| "learning_rate": 2.697537271183242e-05, |
| "loss": 0.3722, |
| "step": 11690 |
| }, |
| { |
| "epoch": 6.56, |
| "learning_rate": 2.6972541989054536e-05, |
| "loss": 0.4023, |
| "step": 11700 |
| }, |
| { |
| "epoch": 6.57, |
| "learning_rate": 2.696971126627666e-05, |
| "loss": 0.3692, |
| "step": 11710 |
| }, |
| { |
| "epoch": 6.57, |
| "learning_rate": 2.6966880543498775e-05, |
| "loss": 0.3766, |
| "step": 11720 |
| }, |
| { |
| "epoch": 6.58, |
| "learning_rate": 2.696404982072089e-05, |
| "loss": 0.3694, |
| "step": 11730 |
| }, |
| { |
| "epoch": 6.58, |
| "learning_rate": 2.6961219097943006e-05, |
| "loss": 0.375, |
| "step": 11740 |
| }, |
| { |
| "epoch": 6.59, |
| "learning_rate": 2.6958388375165126e-05, |
| "loss": 0.3856, |
| "step": 11750 |
| }, |
| { |
| "epoch": 6.6, |
| "learning_rate": 2.6955557652387245e-05, |
| "loss": 0.3688, |
| "step": 11760 |
| }, |
| { |
| "epoch": 6.6, |
| "learning_rate": 2.695272692960936e-05, |
| "loss": 0.3646, |
| "step": 11770 |
| }, |
| { |
| "epoch": 6.61, |
| "learning_rate": 2.6949896206831476e-05, |
| "loss": 0.3631, |
| "step": 11780 |
| }, |
| { |
| "epoch": 6.61, |
| "learning_rate": 2.6947065484053596e-05, |
| "loss": 0.3856, |
| "step": 11790 |
| }, |
| { |
| "epoch": 6.62, |
| "learning_rate": 2.6944234761275715e-05, |
| "loss": 0.3965, |
| "step": 11800 |
| }, |
| { |
| "epoch": 6.62, |
| "learning_rate": 2.694140403849783e-05, |
| "loss": 0.3629, |
| "step": 11810 |
| }, |
| { |
| "epoch": 6.63, |
| "learning_rate": 2.6938573315719946e-05, |
| "loss": 0.3726, |
| "step": 11820 |
| }, |
| { |
| "epoch": 6.63, |
| "learning_rate": 2.6935742592942066e-05, |
| "loss": 0.3733, |
| "step": 11830 |
| }, |
| { |
| "epoch": 6.64, |
| "learning_rate": 2.693291187016418e-05, |
| "loss": 0.3756, |
| "step": 11840 |
| }, |
| { |
| "epoch": 6.65, |
| "learning_rate": 2.69300811473863e-05, |
| "loss": 0.3934, |
| "step": 11850 |
| }, |
| { |
| "epoch": 6.65, |
| "learning_rate": 2.6927250424608416e-05, |
| "loss": 0.3679, |
| "step": 11860 |
| }, |
| { |
| "epoch": 6.66, |
| "learning_rate": 2.6924419701830536e-05, |
| "loss": 0.3488, |
| "step": 11870 |
| }, |
| { |
| "epoch": 6.66, |
| "learning_rate": 2.692158897905265e-05, |
| "loss": 0.3611, |
| "step": 11880 |
| }, |
| { |
| "epoch": 6.67, |
| "learning_rate": 2.691875825627477e-05, |
| "loss": 0.372, |
| "step": 11890 |
| }, |
| { |
| "epoch": 6.67, |
| "learning_rate": 2.6915927533496886e-05, |
| "loss": 0.3895, |
| "step": 11900 |
| }, |
| { |
| "epoch": 6.68, |
| "learning_rate": 2.6913096810719006e-05, |
| "loss": 0.3695, |
| "step": 11910 |
| }, |
| { |
| "epoch": 6.69, |
| "learning_rate": 2.691026608794112e-05, |
| "loss": 0.3659, |
| "step": 11920 |
| }, |
| { |
| "epoch": 6.69, |
| "learning_rate": 2.6907435365163237e-05, |
| "loss": 0.3924, |
| "step": 11930 |
| }, |
| { |
| "epoch": 6.7, |
| "learning_rate": 2.6904604642385356e-05, |
| "loss": 0.3675, |
| "step": 11940 |
| }, |
| { |
| "epoch": 6.7, |
| "learning_rate": 2.6901773919607476e-05, |
| "loss": 0.426, |
| "step": 11950 |
| }, |
| { |
| "epoch": 6.71, |
| "learning_rate": 2.689894319682959e-05, |
| "loss": 0.3792, |
| "step": 11960 |
| }, |
| { |
| "epoch": 6.71, |
| "learning_rate": 2.6896112474051707e-05, |
| "loss": 0.3732, |
| "step": 11970 |
| }, |
| { |
| "epoch": 6.72, |
| "learning_rate": 2.6893281751273826e-05, |
| "loss": 0.369, |
| "step": 11980 |
| }, |
| { |
| "epoch": 6.72, |
| "learning_rate": 2.6890451028495946e-05, |
| "loss": 0.3884, |
| "step": 11990 |
| }, |
| { |
| "epoch": 6.73, |
| "learning_rate": 2.688762030571806e-05, |
| "loss": 0.4104, |
| "step": 12000 |
| }, |
| { |
| "epoch": 6.74, |
| "learning_rate": 2.6884789582940177e-05, |
| "loss": 0.3795, |
| "step": 12010 |
| }, |
| { |
| "epoch": 6.74, |
| "learning_rate": 2.6881958860162293e-05, |
| "loss": 0.3741, |
| "step": 12020 |
| }, |
| { |
| "epoch": 6.75, |
| "learning_rate": 2.6879128137384415e-05, |
| "loss": 0.3693, |
| "step": 12030 |
| }, |
| { |
| "epoch": 6.75, |
| "learning_rate": 2.687629741460653e-05, |
| "loss": 0.3923, |
| "step": 12040 |
| }, |
| { |
| "epoch": 6.76, |
| "learning_rate": 2.6873466691828647e-05, |
| "loss": 0.4137, |
| "step": 12050 |
| }, |
| { |
| "epoch": 6.76, |
| "learning_rate": 2.6870635969050763e-05, |
| "loss": 0.3611, |
| "step": 12060 |
| }, |
| { |
| "epoch": 6.77, |
| "learning_rate": 2.6867805246272882e-05, |
| "loss": 0.3719, |
| "step": 12070 |
| }, |
| { |
| "epoch": 6.77, |
| "learning_rate": 2.6864974523495e-05, |
| "loss": 0.3776, |
| "step": 12080 |
| }, |
| { |
| "epoch": 6.78, |
| "learning_rate": 2.6862143800717117e-05, |
| "loss": 0.375, |
| "step": 12090 |
| }, |
| { |
| "epoch": 6.79, |
| "learning_rate": 2.6859313077939233e-05, |
| "loss": 0.4036, |
| "step": 12100 |
| }, |
| { |
| "epoch": 6.79, |
| "learning_rate": 2.685648235516135e-05, |
| "loss": 0.3583, |
| "step": 12110 |
| }, |
| { |
| "epoch": 6.8, |
| "learning_rate": 2.685365163238347e-05, |
| "loss": 0.3752, |
| "step": 12120 |
| }, |
| { |
| "epoch": 6.8, |
| "learning_rate": 2.6850820909605587e-05, |
| "loss": 0.3744, |
| "step": 12130 |
| }, |
| { |
| "epoch": 6.81, |
| "learning_rate": 2.6847990186827703e-05, |
| "loss": 0.3716, |
| "step": 12140 |
| }, |
| { |
| "epoch": 6.81, |
| "learning_rate": 2.684515946404982e-05, |
| "loss": 0.3991, |
| "step": 12150 |
| }, |
| { |
| "epoch": 6.82, |
| "learning_rate": 2.684232874127194e-05, |
| "loss": 0.365, |
| "step": 12160 |
| }, |
| { |
| "epoch": 6.83, |
| "learning_rate": 2.6839498018494057e-05, |
| "loss": 0.3655, |
| "step": 12170 |
| }, |
| { |
| "epoch": 6.83, |
| "learning_rate": 2.6836667295716173e-05, |
| "loss": 0.3726, |
| "step": 12180 |
| }, |
| { |
| "epoch": 6.84, |
| "learning_rate": 2.683383657293829e-05, |
| "loss": 0.3856, |
| "step": 12190 |
| }, |
| { |
| "epoch": 6.84, |
| "learning_rate": 2.6831005850160408e-05, |
| "loss": 0.4018, |
| "step": 12200 |
| }, |
| { |
| "epoch": 6.85, |
| "learning_rate": 2.6828175127382527e-05, |
| "loss": 0.3564, |
| "step": 12210 |
| }, |
| { |
| "epoch": 6.85, |
| "learning_rate": 2.6825344404604643e-05, |
| "loss": 0.3788, |
| "step": 12220 |
| }, |
| { |
| "epoch": 6.86, |
| "learning_rate": 2.682251368182676e-05, |
| "loss": 0.3693, |
| "step": 12230 |
| }, |
| { |
| "epoch": 6.86, |
| "learning_rate": 2.6819682959048878e-05, |
| "loss": 0.3844, |
| "step": 12240 |
| }, |
| { |
| "epoch": 6.87, |
| "learning_rate": 2.6816852236270997e-05, |
| "loss": 0.4194, |
| "step": 12250 |
| }, |
| { |
| "epoch": 6.88, |
| "learning_rate": 2.6814021513493113e-05, |
| "loss": 0.3764, |
| "step": 12260 |
| }, |
| { |
| "epoch": 6.88, |
| "learning_rate": 2.681119079071523e-05, |
| "loss": 0.3692, |
| "step": 12270 |
| }, |
| { |
| "epoch": 6.89, |
| "learning_rate": 2.6808360067937348e-05, |
| "loss": 0.3716, |
| "step": 12280 |
| }, |
| { |
| "epoch": 6.89, |
| "learning_rate": 2.6805529345159464e-05, |
| "loss": 0.3717, |
| "step": 12290 |
| }, |
| { |
| "epoch": 6.9, |
| "learning_rate": 2.6802698622381583e-05, |
| "loss": 0.4131, |
| "step": 12300 |
| }, |
| { |
| "epoch": 6.9, |
| "learning_rate": 2.67998678996037e-05, |
| "loss": 0.3674, |
| "step": 12310 |
| }, |
| { |
| "epoch": 6.91, |
| "learning_rate": 2.6797037176825818e-05, |
| "loss": 0.3755, |
| "step": 12320 |
| }, |
| { |
| "epoch": 6.91, |
| "learning_rate": 2.6794206454047934e-05, |
| "loss": 0.3678, |
| "step": 12330 |
| }, |
| { |
| "epoch": 6.92, |
| "learning_rate": 2.6791375731270053e-05, |
| "loss": 0.3727, |
| "step": 12340 |
| }, |
| { |
| "epoch": 6.93, |
| "learning_rate": 2.678854500849217e-05, |
| "loss": 0.4011, |
| "step": 12350 |
| }, |
| { |
| "epoch": 6.93, |
| "learning_rate": 2.6785714285714288e-05, |
| "loss": 0.3694, |
| "step": 12360 |
| }, |
| { |
| "epoch": 6.94, |
| "learning_rate": 2.6782883562936404e-05, |
| "loss": 0.3621, |
| "step": 12370 |
| }, |
| { |
| "epoch": 6.94, |
| "learning_rate": 2.678005284015852e-05, |
| "loss": 0.3503, |
| "step": 12380 |
| }, |
| { |
| "epoch": 6.95, |
| "learning_rate": 2.677722211738064e-05, |
| "loss": 0.3696, |
| "step": 12390 |
| }, |
| { |
| "epoch": 6.95, |
| "learning_rate": 2.6774391394602758e-05, |
| "loss": 0.3965, |
| "step": 12400 |
| }, |
| { |
| "epoch": 6.96, |
| "learning_rate": 2.6771560671824874e-05, |
| "loss": 0.3668, |
| "step": 12410 |
| }, |
| { |
| "epoch": 6.97, |
| "learning_rate": 2.676872994904699e-05, |
| "loss": 0.3609, |
| "step": 12420 |
| }, |
| { |
| "epoch": 6.97, |
| "learning_rate": 2.676589922626911e-05, |
| "loss": 0.356, |
| "step": 12430 |
| }, |
| { |
| "epoch": 6.98, |
| "learning_rate": 2.6763068503491228e-05, |
| "loss": 0.3642, |
| "step": 12440 |
| }, |
| { |
| "epoch": 6.98, |
| "learning_rate": 2.6760237780713343e-05, |
| "loss": 0.4102, |
| "step": 12450 |
| }, |
| { |
| "epoch": 6.99, |
| "learning_rate": 2.675740705793546e-05, |
| "loss": 0.3741, |
| "step": 12460 |
| }, |
| { |
| "epoch": 6.99, |
| "learning_rate": 2.6754576335157575e-05, |
| "loss": 0.3658, |
| "step": 12470 |
| }, |
| { |
| "epoch": 7.0, |
| "learning_rate": 2.6751745612379694e-05, |
| "loss": 0.3841, |
| "step": 12480 |
| }, |
| { |
| "epoch": 7.0, |
| "eval_cer": 0.18020731428868442, |
| "eval_loss": 0.45836693048477173, |
| "eval_runtime": 314.6232, |
| "eval_samples_per_second": 16.919, |
| "eval_steps_per_second": 4.23, |
| "eval_wer": 0.24952215190705426, |
| "step": 12481 |
| }, |
| { |
| "epoch": 7.01, |
| "learning_rate": 2.6748914889601813e-05, |
| "loss": 0.398, |
| "step": 12490 |
| }, |
| { |
| "epoch": 7.01, |
| "learning_rate": 2.674608416682393e-05, |
| "loss": 0.3523, |
| "step": 12500 |
| }, |
| { |
| "epoch": 7.02, |
| "learning_rate": 2.6743253444046045e-05, |
| "loss": 0.3563, |
| "step": 12510 |
| }, |
| { |
| "epoch": 7.02, |
| "learning_rate": 2.6740422721268164e-05, |
| "loss": 0.3716, |
| "step": 12520 |
| }, |
| { |
| "epoch": 7.03, |
| "learning_rate": 2.6737591998490283e-05, |
| "loss": 0.371, |
| "step": 12530 |
| }, |
| { |
| "epoch": 7.03, |
| "learning_rate": 2.67347612757124e-05, |
| "loss": 0.357, |
| "step": 12540 |
| }, |
| { |
| "epoch": 7.04, |
| "learning_rate": 2.6731930552934515e-05, |
| "loss": 0.3634, |
| "step": 12550 |
| }, |
| { |
| "epoch": 7.04, |
| "learning_rate": 2.672909983015663e-05, |
| "loss": 0.3517, |
| "step": 12560 |
| }, |
| { |
| "epoch": 7.05, |
| "learning_rate": 2.6726269107378753e-05, |
| "loss": 0.347, |
| "step": 12570 |
| }, |
| { |
| "epoch": 7.06, |
| "learning_rate": 2.672343838460087e-05, |
| "loss": 0.3745, |
| "step": 12580 |
| }, |
| { |
| "epoch": 7.06, |
| "learning_rate": 2.6720607661822985e-05, |
| "loss": 0.3655, |
| "step": 12590 |
| }, |
| { |
| "epoch": 7.07, |
| "learning_rate": 2.67177769390451e-05, |
| "loss": 0.3575, |
| "step": 12600 |
| }, |
| { |
| "epoch": 7.07, |
| "learning_rate": 2.6714946216267223e-05, |
| "loss": 0.3464, |
| "step": 12610 |
| }, |
| { |
| "epoch": 7.08, |
| "learning_rate": 2.671211549348934e-05, |
| "loss": 0.3589, |
| "step": 12620 |
| }, |
| { |
| "epoch": 7.08, |
| "learning_rate": 2.6709284770711455e-05, |
| "loss": 0.3693, |
| "step": 12630 |
| }, |
| { |
| "epoch": 7.09, |
| "learning_rate": 2.670645404793357e-05, |
| "loss": 0.3625, |
| "step": 12640 |
| }, |
| { |
| "epoch": 7.09, |
| "learning_rate": 2.670362332515569e-05, |
| "loss": 0.3651, |
| "step": 12650 |
| }, |
| { |
| "epoch": 7.1, |
| "learning_rate": 2.670079260237781e-05, |
| "loss": 0.3498, |
| "step": 12660 |
| }, |
| { |
| "epoch": 7.11, |
| "learning_rate": 2.6697961879599925e-05, |
| "loss": 0.3602, |
| "step": 12670 |
| }, |
| { |
| "epoch": 7.11, |
| "learning_rate": 2.669513115682204e-05, |
| "loss": 0.3779, |
| "step": 12680 |
| }, |
| { |
| "epoch": 7.12, |
| "learning_rate": 2.669230043404416e-05, |
| "loss": 0.3497, |
| "step": 12690 |
| }, |
| { |
| "epoch": 7.12, |
| "learning_rate": 2.668946971126628e-05, |
| "loss": 0.3463, |
| "step": 12700 |
| }, |
| { |
| "epoch": 7.13, |
| "learning_rate": 2.6686638988488395e-05, |
| "loss": 0.3512, |
| "step": 12710 |
| }, |
| { |
| "epoch": 7.13, |
| "learning_rate": 2.668380826571051e-05, |
| "loss": 0.3633, |
| "step": 12720 |
| }, |
| { |
| "epoch": 7.14, |
| "learning_rate": 2.668097754293263e-05, |
| "loss": 0.3666, |
| "step": 12730 |
| }, |
| { |
| "epoch": 7.15, |
| "learning_rate": 2.6678146820154746e-05, |
| "loss": 0.3521, |
| "step": 12740 |
| }, |
| { |
| "epoch": 7.15, |
| "learning_rate": 2.6675316097376865e-05, |
| "loss": 0.3644, |
| "step": 12750 |
| }, |
| { |
| "epoch": 7.16, |
| "learning_rate": 2.667248537459898e-05, |
| "loss": 0.343, |
| "step": 12760 |
| }, |
| { |
| "epoch": 7.16, |
| "learning_rate": 2.66696546518211e-05, |
| "loss": 0.3448, |
| "step": 12770 |
| }, |
| { |
| "epoch": 7.17, |
| "learning_rate": 2.6666823929043216e-05, |
| "loss": 0.3939, |
| "step": 12780 |
| }, |
| { |
| "epoch": 7.17, |
| "learning_rate": 2.6663993206265335e-05, |
| "loss": 0.3521, |
| "step": 12790 |
| }, |
| { |
| "epoch": 7.18, |
| "learning_rate": 2.666116248348745e-05, |
| "loss": 0.3633, |
| "step": 12800 |
| }, |
| { |
| "epoch": 7.18, |
| "learning_rate": 2.665833176070957e-05, |
| "loss": 0.3569, |
| "step": 12810 |
| }, |
| { |
| "epoch": 7.19, |
| "learning_rate": 2.6655501037931686e-05, |
| "loss": 0.3519, |
| "step": 12820 |
| }, |
| { |
| "epoch": 7.2, |
| "learning_rate": 2.66526703151538e-05, |
| "loss": 0.3719, |
| "step": 12830 |
| }, |
| { |
| "epoch": 7.2, |
| "learning_rate": 2.664983959237592e-05, |
| "loss": 0.3642, |
| "step": 12840 |
| }, |
| { |
| "epoch": 7.21, |
| "learning_rate": 2.664700886959804e-05, |
| "loss": 0.3483, |
| "step": 12850 |
| }, |
| { |
| "epoch": 7.21, |
| "learning_rate": 2.6644178146820156e-05, |
| "loss": 0.3512, |
| "step": 12860 |
| }, |
| { |
| "epoch": 7.22, |
| "learning_rate": 2.664134742404227e-05, |
| "loss": 0.3544, |
| "step": 12870 |
| }, |
| { |
| "epoch": 7.22, |
| "learning_rate": 2.663851670126439e-05, |
| "loss": 0.3696, |
| "step": 12880 |
| }, |
| { |
| "epoch": 7.23, |
| "learning_rate": 2.663568597848651e-05, |
| "loss": 0.3723, |
| "step": 12890 |
| }, |
| { |
| "epoch": 7.23, |
| "learning_rate": 2.6632855255708626e-05, |
| "loss": 0.3432, |
| "step": 12900 |
| }, |
| { |
| "epoch": 7.24, |
| "learning_rate": 2.663002453293074e-05, |
| "loss": 0.3539, |
| "step": 12910 |
| }, |
| { |
| "epoch": 7.25, |
| "learning_rate": 2.6627193810152857e-05, |
| "loss": 0.353, |
| "step": 12920 |
| }, |
| { |
| "epoch": 7.25, |
| "learning_rate": 2.6624363087374976e-05, |
| "loss": 0.373, |
| "step": 12930 |
| }, |
| { |
| "epoch": 7.26, |
| "learning_rate": 2.6621532364597096e-05, |
| "loss": 0.3483, |
| "step": 12940 |
| }, |
| { |
| "epoch": 7.26, |
| "learning_rate": 2.661870164181921e-05, |
| "loss": 0.3491, |
| "step": 12950 |
| }, |
| { |
| "epoch": 7.27, |
| "learning_rate": 2.6615870919041327e-05, |
| "loss": 0.3522, |
| "step": 12960 |
| }, |
| { |
| "epoch": 7.27, |
| "learning_rate": 2.6613040196263446e-05, |
| "loss": 0.3712, |
| "step": 12970 |
| }, |
| { |
| "epoch": 7.28, |
| "learning_rate": 2.6610209473485566e-05, |
| "loss": 0.3719, |
| "step": 12980 |
| }, |
| { |
| "epoch": 7.29, |
| "learning_rate": 2.660737875070768e-05, |
| "loss": 0.3639, |
| "step": 12990 |
| }, |
| { |
| "epoch": 7.29, |
| "learning_rate": 2.6604548027929797e-05, |
| "loss": 0.3497, |
| "step": 13000 |
| }, |
| { |
| "epoch": 7.3, |
| "learning_rate": 2.6601717305151913e-05, |
| "loss": 0.3549, |
| "step": 13010 |
| }, |
| { |
| "epoch": 7.3, |
| "learning_rate": 2.6598886582374036e-05, |
| "loss": 0.3655, |
| "step": 13020 |
| }, |
| { |
| "epoch": 7.31, |
| "learning_rate": 2.659605585959615e-05, |
| "loss": 0.3985, |
| "step": 13030 |
| }, |
| { |
| "epoch": 7.31, |
| "learning_rate": 2.6593225136818267e-05, |
| "loss": 0.3648, |
| "step": 13040 |
| }, |
| { |
| "epoch": 7.32, |
| "learning_rate": 2.6590394414040383e-05, |
| "loss": 0.3511, |
| "step": 13050 |
| }, |
| { |
| "epoch": 7.32, |
| "learning_rate": 2.6587563691262506e-05, |
| "loss": 0.3519, |
| "step": 13060 |
| }, |
| { |
| "epoch": 7.33, |
| "learning_rate": 2.658473296848462e-05, |
| "loss": 0.3439, |
| "step": 13070 |
| }, |
| { |
| "epoch": 7.34, |
| "learning_rate": 2.6581902245706737e-05, |
| "loss": 0.3686, |
| "step": 13080 |
| }, |
| { |
| "epoch": 7.34, |
| "learning_rate": 2.6579071522928853e-05, |
| "loss": 0.3684, |
| "step": 13090 |
| }, |
| { |
| "epoch": 7.35, |
| "learning_rate": 2.6576240800150972e-05, |
| "loss": 0.3435, |
| "step": 13100 |
| }, |
| { |
| "epoch": 7.35, |
| "learning_rate": 2.657341007737309e-05, |
| "loss": 0.3568, |
| "step": 13110 |
| }, |
| { |
| "epoch": 7.36, |
| "learning_rate": 2.6570579354595207e-05, |
| "loss": 0.3601, |
| "step": 13120 |
| }, |
| { |
| "epoch": 7.36, |
| "learning_rate": 2.6567748631817323e-05, |
| "loss": 0.3756, |
| "step": 13130 |
| }, |
| { |
| "epoch": 7.37, |
| "learning_rate": 2.6564917909039442e-05, |
| "loss": 0.3342, |
| "step": 13140 |
| }, |
| { |
| "epoch": 7.38, |
| "learning_rate": 2.656208718626156e-05, |
| "loss": 0.351, |
| "step": 13150 |
| }, |
| { |
| "epoch": 7.38, |
| "learning_rate": 2.6559256463483677e-05, |
| "loss": 0.3366, |
| "step": 13160 |
| }, |
| { |
| "epoch": 7.39, |
| "learning_rate": 2.6556425740705793e-05, |
| "loss": 0.3621, |
| "step": 13170 |
| }, |
| { |
| "epoch": 7.39, |
| "learning_rate": 2.6553595017927912e-05, |
| "loss": 0.3868, |
| "step": 13180 |
| }, |
| { |
| "epoch": 7.4, |
| "learning_rate": 2.6550764295150028e-05, |
| "loss": 0.3657, |
| "step": 13190 |
| }, |
| { |
| "epoch": 7.4, |
| "learning_rate": 2.6547933572372147e-05, |
| "loss": 0.3479, |
| "step": 13200 |
| }, |
| { |
| "epoch": 7.41, |
| "learning_rate": 2.6545102849594263e-05, |
| "loss": 0.3488, |
| "step": 13210 |
| }, |
| { |
| "epoch": 7.41, |
| "learning_rate": 2.6542272126816382e-05, |
| "loss": 0.3689, |
| "step": 13220 |
| }, |
| { |
| "epoch": 7.42, |
| "learning_rate": 2.6539441404038498e-05, |
| "loss": 0.3735, |
| "step": 13230 |
| }, |
| { |
| "epoch": 7.43, |
| "learning_rate": 2.6536610681260617e-05, |
| "loss": 0.3543, |
| "step": 13240 |
| }, |
| { |
| "epoch": 7.43, |
| "learning_rate": 2.6533779958482733e-05, |
| "loss": 0.3509, |
| "step": 13250 |
| }, |
| { |
| "epoch": 7.44, |
| "learning_rate": 2.6530949235704852e-05, |
| "loss": 0.3405, |
| "step": 13260 |
| }, |
| { |
| "epoch": 7.44, |
| "learning_rate": 2.6528118512926968e-05, |
| "loss": 0.3597, |
| "step": 13270 |
| }, |
| { |
| "epoch": 7.45, |
| "learning_rate": 2.6525287790149084e-05, |
| "loss": 0.3603, |
| "step": 13280 |
| }, |
| { |
| "epoch": 7.45, |
| "learning_rate": 2.6522457067371203e-05, |
| "loss": 0.3709, |
| "step": 13290 |
| }, |
| { |
| "epoch": 7.46, |
| "learning_rate": 2.6519626344593322e-05, |
| "loss": 0.3583, |
| "step": 13300 |
| }, |
| { |
| "epoch": 7.46, |
| "learning_rate": 2.6516795621815438e-05, |
| "loss": 0.3685, |
| "step": 13310 |
| }, |
| { |
| "epoch": 7.47, |
| "learning_rate": 2.6513964899037554e-05, |
| "loss": 0.3517, |
| "step": 13320 |
| }, |
| { |
| "epoch": 7.48, |
| "learning_rate": 2.6511134176259673e-05, |
| "loss": 0.3837, |
| "step": 13330 |
| }, |
| { |
| "epoch": 7.48, |
| "learning_rate": 2.650830345348179e-05, |
| "loss": 0.3594, |
| "step": 13340 |
| }, |
| { |
| "epoch": 7.49, |
| "learning_rate": 2.6505472730703908e-05, |
| "loss": 0.3653, |
| "step": 13350 |
| }, |
| { |
| "epoch": 7.49, |
| "learning_rate": 2.6502642007926024e-05, |
| "loss": 0.3491, |
| "step": 13360 |
| }, |
| { |
| "epoch": 7.5, |
| "learning_rate": 2.6499811285148143e-05, |
| "loss": 0.3579, |
| "step": 13370 |
| }, |
| { |
| "epoch": 7.5, |
| "learning_rate": 2.649698056237026e-05, |
| "loss": 0.399, |
| "step": 13380 |
| }, |
| { |
| "epoch": 7.51, |
| "learning_rate": 2.6494149839592378e-05, |
| "loss": 0.364, |
| "step": 13390 |
| }, |
| { |
| "epoch": 7.52, |
| "learning_rate": 2.6491319116814494e-05, |
| "loss": 0.3516, |
| "step": 13400 |
| }, |
| { |
| "epoch": 7.52, |
| "learning_rate": 2.648848839403661e-05, |
| "loss": 0.3498, |
| "step": 13410 |
| }, |
| { |
| "epoch": 7.53, |
| "learning_rate": 2.648565767125873e-05, |
| "loss": 0.3581, |
| "step": 13420 |
| }, |
| { |
| "epoch": 7.53, |
| "learning_rate": 2.6482826948480848e-05, |
| "loss": 0.3783, |
| "step": 13430 |
| }, |
| { |
| "epoch": 7.54, |
| "learning_rate": 2.6479996225702964e-05, |
| "loss": 0.3534, |
| "step": 13440 |
| }, |
| { |
| "epoch": 7.54, |
| "learning_rate": 2.647716550292508e-05, |
| "loss": 0.3475, |
| "step": 13450 |
| }, |
| { |
| "epoch": 7.55, |
| "learning_rate": 2.64743347801472e-05, |
| "loss": 0.3533, |
| "step": 13460 |
| }, |
| { |
| "epoch": 7.55, |
| "learning_rate": 2.6471504057369318e-05, |
| "loss": 0.3498, |
| "step": 13470 |
| }, |
| { |
| "epoch": 7.56, |
| "learning_rate": 2.6468673334591434e-05, |
| "loss": 0.3966, |
| "step": 13480 |
| }, |
| { |
| "epoch": 7.57, |
| "learning_rate": 2.646584261181355e-05, |
| "loss": 0.3514, |
| "step": 13490 |
| }, |
| { |
| "epoch": 7.57, |
| "learning_rate": 2.6463011889035665e-05, |
| "loss": 0.3407, |
| "step": 13500 |
| }, |
| { |
| "epoch": 7.58, |
| "learning_rate": 2.6460181166257788e-05, |
| "loss": 0.3588, |
| "step": 13510 |
| }, |
| { |
| "epoch": 7.58, |
| "learning_rate": 2.6457350443479904e-05, |
| "loss": 0.3511, |
| "step": 13520 |
| }, |
| { |
| "epoch": 7.59, |
| "learning_rate": 2.645451972070202e-05, |
| "loss": 0.3804, |
| "step": 13530 |
| }, |
| { |
| "epoch": 7.59, |
| "learning_rate": 2.6451688997924135e-05, |
| "loss": 0.3858, |
| "step": 13540 |
| }, |
| { |
| "epoch": 7.6, |
| "learning_rate": 2.6448858275146254e-05, |
| "loss": 0.3448, |
| "step": 13550 |
| }, |
| { |
| "epoch": 7.6, |
| "learning_rate": 2.6446027552368373e-05, |
| "loss": 0.3512, |
| "step": 13560 |
| }, |
| { |
| "epoch": 7.61, |
| "learning_rate": 2.644319682959049e-05, |
| "loss": 0.3413, |
| "step": 13570 |
| }, |
| { |
| "epoch": 7.62, |
| "learning_rate": 2.6440366106812605e-05, |
| "loss": 0.3743, |
| "step": 13580 |
| }, |
| { |
| "epoch": 7.62, |
| "learning_rate": 2.6437535384034724e-05, |
| "loss": 0.3438, |
| "step": 13590 |
| }, |
| { |
| "epoch": 7.63, |
| "learning_rate": 2.6434704661256843e-05, |
| "loss": 0.3558, |
| "step": 13600 |
| }, |
| { |
| "epoch": 7.63, |
| "learning_rate": 2.643187393847896e-05, |
| "loss": 0.3538, |
| "step": 13610 |
| }, |
| { |
| "epoch": 7.64, |
| "learning_rate": 2.6429043215701075e-05, |
| "loss": 0.3421, |
| "step": 13620 |
| }, |
| { |
| "epoch": 7.64, |
| "learning_rate": 2.6426212492923194e-05, |
| "loss": 0.3807, |
| "step": 13630 |
| }, |
| { |
| "epoch": 7.65, |
| "learning_rate": 2.642338177014531e-05, |
| "loss": 0.3443, |
| "step": 13640 |
| }, |
| { |
| "epoch": 7.66, |
| "learning_rate": 2.642055104736743e-05, |
| "loss": 0.3507, |
| "step": 13650 |
| }, |
| { |
| "epoch": 7.66, |
| "learning_rate": 2.6417720324589545e-05, |
| "loss": 0.356, |
| "step": 13660 |
| }, |
| { |
| "epoch": 7.67, |
| "learning_rate": 2.6414889601811664e-05, |
| "loss": 0.3591, |
| "step": 13670 |
| }, |
| { |
| "epoch": 7.67, |
| "learning_rate": 2.641205887903378e-05, |
| "loss": 0.3758, |
| "step": 13680 |
| }, |
| { |
| "epoch": 7.68, |
| "learning_rate": 2.64092281562559e-05, |
| "loss": 0.3589, |
| "step": 13690 |
| }, |
| { |
| "epoch": 7.68, |
| "learning_rate": 2.6406397433478015e-05, |
| "loss": 0.3459, |
| "step": 13700 |
| }, |
| { |
| "epoch": 7.69, |
| "learning_rate": 2.6403566710700134e-05, |
| "loss": 0.3415, |
| "step": 13710 |
| }, |
| { |
| "epoch": 7.69, |
| "learning_rate": 2.640073598792225e-05, |
| "loss": 0.3615, |
| "step": 13720 |
| }, |
| { |
| "epoch": 7.7, |
| "learning_rate": 2.639790526514437e-05, |
| "loss": 0.393, |
| "step": 13730 |
| }, |
| { |
| "epoch": 7.71, |
| "learning_rate": 2.6395074542366485e-05, |
| "loss": 0.3597, |
| "step": 13740 |
| }, |
| { |
| "epoch": 7.71, |
| "learning_rate": 2.63922438195886e-05, |
| "loss": 0.3541, |
| "step": 13750 |
| }, |
| { |
| "epoch": 7.72, |
| "learning_rate": 2.638941309681072e-05, |
| "loss": 0.3443, |
| "step": 13760 |
| }, |
| { |
| "epoch": 7.72, |
| "learning_rate": 2.6386582374032836e-05, |
| "loss": 0.3491, |
| "step": 13770 |
| }, |
| { |
| "epoch": 7.73, |
| "learning_rate": 2.6383751651254955e-05, |
| "loss": 0.3646, |
| "step": 13780 |
| }, |
| { |
| "epoch": 7.73, |
| "learning_rate": 2.638092092847707e-05, |
| "loss": 0.3461, |
| "step": 13790 |
| }, |
| { |
| "epoch": 7.74, |
| "learning_rate": 2.637809020569919e-05, |
| "loss": 0.3538, |
| "step": 13800 |
| }, |
| { |
| "epoch": 7.75, |
| "learning_rate": 2.6375259482921306e-05, |
| "loss": 0.3365, |
| "step": 13810 |
| }, |
| { |
| "epoch": 7.75, |
| "learning_rate": 2.6372428760143425e-05, |
| "loss": 0.3444, |
| "step": 13820 |
| }, |
| { |
| "epoch": 7.76, |
| "learning_rate": 2.636959803736554e-05, |
| "loss": 0.3719, |
| "step": 13830 |
| }, |
| { |
| "epoch": 7.76, |
| "learning_rate": 2.636676731458766e-05, |
| "loss": 0.3639, |
| "step": 13840 |
| }, |
| { |
| "epoch": 7.77, |
| "learning_rate": 2.6363936591809776e-05, |
| "loss": 0.3534, |
| "step": 13850 |
| }, |
| { |
| "epoch": 7.77, |
| "learning_rate": 2.636110586903189e-05, |
| "loss": 0.3509, |
| "step": 13860 |
| }, |
| { |
| "epoch": 7.78, |
| "learning_rate": 2.635827514625401e-05, |
| "loss": 0.3362, |
| "step": 13870 |
| }, |
| { |
| "epoch": 7.78, |
| "learning_rate": 2.635544442347613e-05, |
| "loss": 0.4063, |
| "step": 13880 |
| }, |
| { |
| "epoch": 7.79, |
| "learning_rate": 2.6352896772976036e-05, |
| "loss": 0.3447, |
| "step": 13890 |
| }, |
| { |
| "epoch": 7.8, |
| "learning_rate": 2.635006605019815e-05, |
| "loss": 0.3449, |
| "step": 13900 |
| }, |
| { |
| "epoch": 7.8, |
| "learning_rate": 2.6347235327420267e-05, |
| "loss": 0.3488, |
| "step": 13910 |
| }, |
| { |
| "epoch": 7.81, |
| "learning_rate": 2.6344404604642386e-05, |
| "loss": 0.3457, |
| "step": 13920 |
| }, |
| { |
| "epoch": 7.81, |
| "learning_rate": 2.6341573881864502e-05, |
| "loss": 0.3735, |
| "step": 13930 |
| }, |
| { |
| "epoch": 7.82, |
| "learning_rate": 2.633874315908662e-05, |
| "loss": 0.3595, |
| "step": 13940 |
| }, |
| { |
| "epoch": 7.82, |
| "learning_rate": 2.6335912436308737e-05, |
| "loss": 0.351, |
| "step": 13950 |
| }, |
| { |
| "epoch": 7.83, |
| "learning_rate": 2.6333081713530856e-05, |
| "loss": 0.3576, |
| "step": 13960 |
| }, |
| { |
| "epoch": 7.83, |
| "learning_rate": 2.6330250990752972e-05, |
| "loss": 0.3453, |
| "step": 13970 |
| }, |
| { |
| "epoch": 7.84, |
| "learning_rate": 2.632742026797509e-05, |
| "loss": 0.3824, |
| "step": 13980 |
| }, |
| { |
| "epoch": 7.85, |
| "learning_rate": 2.6324589545197207e-05, |
| "loss": 0.3526, |
| "step": 13990 |
| }, |
| { |
| "epoch": 7.85, |
| "learning_rate": 2.6321758822419326e-05, |
| "loss": 0.349, |
| "step": 14000 |
| }, |
| { |
| "epoch": 7.86, |
| "learning_rate": 2.6318928099641442e-05, |
| "loss": 0.3275, |
| "step": 14010 |
| }, |
| { |
| "epoch": 7.86, |
| "learning_rate": 2.6316097376863558e-05, |
| "loss": 0.3526, |
| "step": 14020 |
| }, |
| { |
| "epoch": 7.87, |
| "learning_rate": 2.6313266654085677e-05, |
| "loss": 0.3683, |
| "step": 14030 |
| }, |
| { |
| "epoch": 7.87, |
| "learning_rate": 2.6310435931307796e-05, |
| "loss": 0.3461, |
| "step": 14040 |
| }, |
| { |
| "epoch": 7.88, |
| "learning_rate": 2.6307605208529912e-05, |
| "loss": 0.3552, |
| "step": 14050 |
| }, |
| { |
| "epoch": 7.89, |
| "learning_rate": 2.6304774485752028e-05, |
| "loss": 0.3496, |
| "step": 14060 |
| }, |
| { |
| "epoch": 7.89, |
| "learning_rate": 2.6301943762974147e-05, |
| "loss": 0.3386, |
| "step": 14070 |
| }, |
| { |
| "epoch": 7.9, |
| "learning_rate": 2.6299113040196266e-05, |
| "loss": 0.3953, |
| "step": 14080 |
| }, |
| { |
| "epoch": 7.9, |
| "learning_rate": 2.6296282317418382e-05, |
| "loss": 0.3725, |
| "step": 14090 |
| }, |
| { |
| "epoch": 7.91, |
| "learning_rate": 2.6293451594640498e-05, |
| "loss": 0.3429, |
| "step": 14100 |
| }, |
| { |
| "epoch": 7.91, |
| "learning_rate": 2.6290620871862614e-05, |
| "loss": 0.3423, |
| "step": 14110 |
| }, |
| { |
| "epoch": 7.92, |
| "learning_rate": 2.6287790149084736e-05, |
| "loss": 0.3524, |
| "step": 14120 |
| }, |
| { |
| "epoch": 7.92, |
| "learning_rate": 2.6284959426306852e-05, |
| "loss": 0.3843, |
| "step": 14130 |
| }, |
| { |
| "epoch": 7.93, |
| "learning_rate": 2.6282128703528968e-05, |
| "loss": 0.3524, |
| "step": 14140 |
| }, |
| { |
| "epoch": 7.94, |
| "learning_rate": 2.6279297980751084e-05, |
| "loss": 0.347, |
| "step": 14150 |
| }, |
| { |
| "epoch": 7.94, |
| "learning_rate": 2.6276467257973206e-05, |
| "loss": 0.3466, |
| "step": 14160 |
| }, |
| { |
| "epoch": 7.95, |
| "learning_rate": 2.6273636535195322e-05, |
| "loss": 0.3298, |
| "step": 14170 |
| }, |
| { |
| "epoch": 7.95, |
| "learning_rate": 2.6270805812417438e-05, |
| "loss": 0.3875, |
| "step": 14180 |
| }, |
| { |
| "epoch": 7.96, |
| "learning_rate": 2.6267975089639554e-05, |
| "loss": 0.3591, |
| "step": 14190 |
| }, |
| { |
| "epoch": 7.96, |
| "learning_rate": 2.6265144366861673e-05, |
| "loss": 0.3518, |
| "step": 14200 |
| }, |
| { |
| "epoch": 7.97, |
| "learning_rate": 2.6262313644083792e-05, |
| "loss": 0.3396, |
| "step": 14210 |
| }, |
| { |
| "epoch": 7.97, |
| "learning_rate": 2.6259482921305908e-05, |
| "loss": 0.3378, |
| "step": 14220 |
| }, |
| { |
| "epoch": 7.98, |
| "learning_rate": 2.6256652198528024e-05, |
| "loss": 0.3919, |
| "step": 14230 |
| }, |
| { |
| "epoch": 7.99, |
| "learning_rate": 2.6253821475750143e-05, |
| "loss": 0.3686, |
| "step": 14240 |
| }, |
| { |
| "epoch": 7.99, |
| "learning_rate": 2.6250990752972262e-05, |
| "loss": 0.356, |
| "step": 14250 |
| }, |
| { |
| "epoch": 8.0, |
| "learning_rate": 2.6248160030194378e-05, |
| "loss": 0.3582, |
| "step": 14260 |
| }, |
| { |
| "epoch": 8.0, |
| "eval_cer": 0.18124778401296457, |
| "eval_loss": 0.4464899003505707, |
| "eval_runtime": 314.7637, |
| "eval_samples_per_second": 16.911, |
| "eval_steps_per_second": 4.229, |
| "eval_wer": 0.24615421019006128, |
| "step": 14264 |
| }, |
| { |
| "epoch": 8.0, |
| "learning_rate": 2.6245329307416494e-05, |
| "loss": 0.4036, |
| "step": 14270 |
| }, |
| { |
| "epoch": 8.01, |
| "learning_rate": 2.624249858463861e-05, |
| "loss": 0.3382, |
| "step": 14280 |
| }, |
| { |
| "epoch": 8.01, |
| "learning_rate": 2.623966786186073e-05, |
| "loss": 0.3453, |
| "step": 14290 |
| }, |
| { |
| "epoch": 8.02, |
| "learning_rate": 2.6236837139082848e-05, |
| "loss": 0.3323, |
| "step": 14300 |
| }, |
| { |
| "epoch": 8.03, |
| "learning_rate": 2.6234006416304963e-05, |
| "loss": 0.3598, |
| "step": 14310 |
| }, |
| { |
| "epoch": 8.03, |
| "learning_rate": 2.623117569352708e-05, |
| "loss": 0.3576, |
| "step": 14320 |
| }, |
| { |
| "epoch": 8.04, |
| "learning_rate": 2.62283449707492e-05, |
| "loss": 0.3269, |
| "step": 14330 |
| }, |
| { |
| "epoch": 8.04, |
| "learning_rate": 2.6225514247971318e-05, |
| "loss": 0.3389, |
| "step": 14340 |
| }, |
| { |
| "epoch": 8.05, |
| "learning_rate": 2.6222683525193433e-05, |
| "loss": 0.3475, |
| "step": 14350 |
| }, |
| { |
| "epoch": 8.05, |
| "learning_rate": 2.621985280241555e-05, |
| "loss": 0.3518, |
| "step": 14360 |
| }, |
| { |
| "epoch": 8.06, |
| "learning_rate": 2.621702207963767e-05, |
| "loss": 0.359, |
| "step": 14370 |
| }, |
| { |
| "epoch": 8.07, |
| "learning_rate": 2.6214191356859784e-05, |
| "loss": 0.3372, |
| "step": 14380 |
| }, |
| { |
| "epoch": 8.07, |
| "learning_rate": 2.6211360634081903e-05, |
| "loss": 0.3331, |
| "step": 14390 |
| }, |
| { |
| "epoch": 8.08, |
| "learning_rate": 2.620852991130402e-05, |
| "loss": 0.3404, |
| "step": 14400 |
| }, |
| { |
| "epoch": 8.08, |
| "learning_rate": 2.620569918852614e-05, |
| "loss": 0.3618, |
| "step": 14410 |
| }, |
| { |
| "epoch": 8.09, |
| "learning_rate": 2.6202868465748254e-05, |
| "loss": 0.363, |
| "step": 14420 |
| }, |
| { |
| "epoch": 8.09, |
| "learning_rate": 2.6200037742970373e-05, |
| "loss": 0.3361, |
| "step": 14430 |
| }, |
| { |
| "epoch": 8.1, |
| "learning_rate": 2.619720702019249e-05, |
| "loss": 0.3276, |
| "step": 14440 |
| }, |
| { |
| "epoch": 8.1, |
| "learning_rate": 2.619437629741461e-05, |
| "loss": 0.3287, |
| "step": 14450 |
| }, |
| { |
| "epoch": 8.11, |
| "learning_rate": 2.6191545574636724e-05, |
| "loss": 0.3428, |
| "step": 14460 |
| }, |
| { |
| "epoch": 8.12, |
| "learning_rate": 2.618871485185884e-05, |
| "loss": 0.3664, |
| "step": 14470 |
| }, |
| { |
| "epoch": 8.12, |
| "learning_rate": 2.618588412908096e-05, |
| "loss": 0.3341, |
| "step": 14480 |
| }, |
| { |
| "epoch": 8.13, |
| "learning_rate": 2.618305340630308e-05, |
| "loss": 0.3354, |
| "step": 14490 |
| }, |
| { |
| "epoch": 8.13, |
| "learning_rate": 2.6180222683525194e-05, |
| "loss": 0.3363, |
| "step": 14500 |
| }, |
| { |
| "epoch": 8.14, |
| "learning_rate": 2.617739196074731e-05, |
| "loss": 0.3458, |
| "step": 14510 |
| }, |
| { |
| "epoch": 8.14, |
| "learning_rate": 2.617456123796943e-05, |
| "loss": 0.3484, |
| "step": 14520 |
| }, |
| { |
| "epoch": 8.15, |
| "learning_rate": 2.617173051519155e-05, |
| "loss": 0.3427, |
| "step": 14530 |
| }, |
| { |
| "epoch": 8.15, |
| "learning_rate": 2.6168899792413664e-05, |
| "loss": 0.3414, |
| "step": 14540 |
| }, |
| { |
| "epoch": 8.16, |
| "learning_rate": 2.616606906963578e-05, |
| "loss": 0.3357, |
| "step": 14550 |
| }, |
| { |
| "epoch": 8.17, |
| "learning_rate": 2.6163238346857896e-05, |
| "loss": 0.3287, |
| "step": 14560 |
| }, |
| { |
| "epoch": 8.17, |
| "learning_rate": 2.616040762408002e-05, |
| "loss": 0.3572, |
| "step": 14570 |
| }, |
| { |
| "epoch": 8.18, |
| "learning_rate": 2.6157576901302134e-05, |
| "loss": 0.3342, |
| "step": 14580 |
| }, |
| { |
| "epoch": 8.18, |
| "learning_rate": 2.615474617852425e-05, |
| "loss": 0.3352, |
| "step": 14590 |
| }, |
| { |
| "epoch": 8.19, |
| "learning_rate": 2.6151915455746366e-05, |
| "loss": 0.3433, |
| "step": 14600 |
| }, |
| { |
| "epoch": 8.19, |
| "learning_rate": 2.614908473296849e-05, |
| "loss": 0.3435, |
| "step": 14610 |
| }, |
| { |
| "epoch": 8.2, |
| "learning_rate": 2.6146254010190604e-05, |
| "loss": 0.3501, |
| "step": 14620 |
| }, |
| { |
| "epoch": 8.21, |
| "learning_rate": 2.614342328741272e-05, |
| "loss": 0.3388, |
| "step": 14630 |
| }, |
| { |
| "epoch": 8.21, |
| "learning_rate": 2.6140592564634836e-05, |
| "loss": 0.3388, |
| "step": 14640 |
| }, |
| { |
| "epoch": 8.22, |
| "learning_rate": 2.6137761841856955e-05, |
| "loss": 0.3406, |
| "step": 14650 |
| }, |
| { |
| "epoch": 8.22, |
| "learning_rate": 2.6134931119079074e-05, |
| "loss": 0.3525, |
| "step": 14660 |
| }, |
| { |
| "epoch": 8.23, |
| "learning_rate": 2.613210039630119e-05, |
| "loss": 0.372, |
| "step": 14670 |
| }, |
| { |
| "epoch": 8.23, |
| "learning_rate": 2.6129269673523306e-05, |
| "loss": 0.3313, |
| "step": 14680 |
| }, |
| { |
| "epoch": 8.24, |
| "learning_rate": 2.6126438950745425e-05, |
| "loss": 0.3378, |
| "step": 14690 |
| }, |
| { |
| "epoch": 8.24, |
| "learning_rate": 2.6123608227967544e-05, |
| "loss": 0.3208, |
| "step": 14700 |
| }, |
| { |
| "epoch": 8.25, |
| "learning_rate": 2.612077750518966e-05, |
| "loss": 0.3452, |
| "step": 14710 |
| }, |
| { |
| "epoch": 8.26, |
| "learning_rate": 2.6117946782411776e-05, |
| "loss": 0.3452, |
| "step": 14720 |
| }, |
| { |
| "epoch": 8.26, |
| "learning_rate": 2.611511605963389e-05, |
| "loss": 0.3341, |
| "step": 14730 |
| }, |
| { |
| "epoch": 8.27, |
| "learning_rate": 2.611228533685601e-05, |
| "loss": 0.3257, |
| "step": 14740 |
| }, |
| { |
| "epoch": 8.27, |
| "learning_rate": 2.610945461407813e-05, |
| "loss": 0.332, |
| "step": 14750 |
| }, |
| { |
| "epoch": 8.28, |
| "learning_rate": 2.6106623891300246e-05, |
| "loss": 0.3568, |
| "step": 14760 |
| }, |
| { |
| "epoch": 8.28, |
| "learning_rate": 2.610379316852236e-05, |
| "loss": 0.3497, |
| "step": 14770 |
| }, |
| { |
| "epoch": 8.29, |
| "learning_rate": 2.610096244574448e-05, |
| "loss": 0.3358, |
| "step": 14780 |
| }, |
| { |
| "epoch": 8.29, |
| "learning_rate": 2.60981317229666e-05, |
| "loss": 0.3242, |
| "step": 14790 |
| }, |
| { |
| "epoch": 8.3, |
| "learning_rate": 2.6095301000188716e-05, |
| "loss": 0.3305, |
| "step": 14800 |
| }, |
| { |
| "epoch": 8.31, |
| "learning_rate": 2.609247027741083e-05, |
| "loss": 0.3442, |
| "step": 14810 |
| }, |
| { |
| "epoch": 8.31, |
| "learning_rate": 2.608963955463295e-05, |
| "loss": 0.3414, |
| "step": 14820 |
| }, |
| { |
| "epoch": 8.32, |
| "learning_rate": 2.6086808831855066e-05, |
| "loss": 0.3287, |
| "step": 14830 |
| }, |
| { |
| "epoch": 8.32, |
| "learning_rate": 2.6083978109077186e-05, |
| "loss": 0.3314, |
| "step": 14840 |
| }, |
| { |
| "epoch": 8.33, |
| "learning_rate": 2.60811473862993e-05, |
| "loss": 0.3329, |
| "step": 14850 |
| }, |
| { |
| "epoch": 8.33, |
| "learning_rate": 2.607831666352142e-05, |
| "loss": 0.339, |
| "step": 14860 |
| }, |
| { |
| "epoch": 8.34, |
| "learning_rate": 2.6075485940743536e-05, |
| "loss": 0.3536, |
| "step": 14870 |
| }, |
| { |
| "epoch": 8.35, |
| "learning_rate": 2.6072655217965656e-05, |
| "loss": 0.3384, |
| "step": 14880 |
| }, |
| { |
| "epoch": 8.35, |
| "learning_rate": 2.606982449518777e-05, |
| "loss": 0.3292, |
| "step": 14890 |
| }, |
| { |
| "epoch": 8.36, |
| "learning_rate": 2.606699377240989e-05, |
| "loss": 0.3316, |
| "step": 14900 |
| }, |
| { |
| "epoch": 8.36, |
| "learning_rate": 2.6064163049632006e-05, |
| "loss": 0.3545, |
| "step": 14910 |
| }, |
| { |
| "epoch": 8.37, |
| "learning_rate": 2.6061332326854122e-05, |
| "loss": 0.3436, |
| "step": 14920 |
| }, |
| { |
| "epoch": 8.37, |
| "learning_rate": 2.605850160407624e-05, |
| "loss": 0.3399, |
| "step": 14930 |
| }, |
| { |
| "epoch": 8.38, |
| "learning_rate": 2.605567088129836e-05, |
| "loss": 0.3394, |
| "step": 14940 |
| }, |
| { |
| "epoch": 8.38, |
| "learning_rate": 2.6052840158520476e-05, |
| "loss": 0.3308, |
| "step": 14950 |
| }, |
| { |
| "epoch": 8.39, |
| "learning_rate": 2.6050009435742592e-05, |
| "loss": 0.3449, |
| "step": 14960 |
| }, |
| { |
| "epoch": 8.4, |
| "learning_rate": 2.604717871296471e-05, |
| "loss": 0.3452, |
| "step": 14970 |
| }, |
| { |
| "epoch": 8.4, |
| "learning_rate": 2.604434799018683e-05, |
| "loss": 0.3421, |
| "step": 14980 |
| }, |
| { |
| "epoch": 8.41, |
| "learning_rate": 2.6041517267408946e-05, |
| "loss": 0.3381, |
| "step": 14990 |
| }, |
| { |
| "epoch": 8.41, |
| "learning_rate": 2.6038686544631062e-05, |
| "loss": 0.3347, |
| "step": 15000 |
| }, |
| { |
| "epoch": 8.42, |
| "learning_rate": 2.6035855821853178e-05, |
| "loss": 0.343, |
| "step": 15010 |
| }, |
| { |
| "epoch": 8.42, |
| "learning_rate": 2.60330250990753e-05, |
| "loss": 0.3512, |
| "step": 15020 |
| }, |
| { |
| "epoch": 8.43, |
| "learning_rate": 2.6030194376297416e-05, |
| "loss": 0.3309, |
| "step": 15030 |
| }, |
| { |
| "epoch": 8.44, |
| "learning_rate": 2.6027363653519532e-05, |
| "loss": 0.3345, |
| "step": 15040 |
| }, |
| { |
| "epoch": 8.44, |
| "learning_rate": 2.6024532930741648e-05, |
| "loss": 0.3419, |
| "step": 15050 |
| }, |
| { |
| "epoch": 8.45, |
| "learning_rate": 2.602170220796377e-05, |
| "loss": 0.347, |
| "step": 15060 |
| }, |
| { |
| "epoch": 8.45, |
| "learning_rate": 2.6018871485185886e-05, |
| "loss": 0.3363, |
| "step": 15070 |
| }, |
| { |
| "epoch": 8.46, |
| "learning_rate": 2.6016040762408002e-05, |
| "loss": 0.3435, |
| "step": 15080 |
| }, |
| { |
| "epoch": 8.46, |
| "learning_rate": 2.6013210039630118e-05, |
| "loss": 0.3195, |
| "step": 15090 |
| }, |
| { |
| "epoch": 8.47, |
| "learning_rate": 2.6010379316852237e-05, |
| "loss": 0.3421, |
| "step": 15100 |
| }, |
| { |
| "epoch": 8.47, |
| "learning_rate": 2.6007548594074356e-05, |
| "loss": 0.3302, |
| "step": 15110 |
| }, |
| { |
| "epoch": 8.48, |
| "learning_rate": 2.6004717871296472e-05, |
| "loss": 0.3576, |
| "step": 15120 |
| }, |
| { |
| "epoch": 8.49, |
| "learning_rate": 2.6001887148518588e-05, |
| "loss": 0.344, |
| "step": 15130 |
| }, |
| { |
| "epoch": 8.49, |
| "learning_rate": 2.5999056425740704e-05, |
| "loss": 0.3384, |
| "step": 15140 |
| }, |
| { |
| "epoch": 8.5, |
| "learning_rate": 2.5996225702962826e-05, |
| "loss": 0.3398, |
| "step": 15150 |
| }, |
| { |
| "epoch": 8.5, |
| "learning_rate": 2.5993394980184942e-05, |
| "loss": 0.3493, |
| "step": 15160 |
| }, |
| { |
| "epoch": 8.51, |
| "learning_rate": 2.5990564257407058e-05, |
| "loss": 0.3606, |
| "step": 15170 |
| }, |
| { |
| "epoch": 8.51, |
| "learning_rate": 2.5987733534629174e-05, |
| "loss": 0.3372, |
| "step": 15180 |
| }, |
| { |
| "epoch": 8.52, |
| "learning_rate": 2.5984902811851293e-05, |
| "loss": 0.3246, |
| "step": 15190 |
| }, |
| { |
| "epoch": 8.52, |
| "learning_rate": 2.5982072089073412e-05, |
| "loss": 0.328, |
| "step": 15200 |
| }, |
| { |
| "epoch": 8.53, |
| "learning_rate": 2.5979241366295528e-05, |
| "loss": 0.3484, |
| "step": 15210 |
| }, |
| { |
| "epoch": 8.54, |
| "learning_rate": 2.5976410643517644e-05, |
| "loss": 0.3562, |
| "step": 15220 |
| }, |
| { |
| "epoch": 8.54, |
| "learning_rate": 2.5973579920739763e-05, |
| "loss": 0.3373, |
| "step": 15230 |
| }, |
| { |
| "epoch": 8.55, |
| "learning_rate": 2.5970749197961882e-05, |
| "loss": 0.3239, |
| "step": 15240 |
| }, |
| { |
| "epoch": 8.55, |
| "learning_rate": 2.5967918475183998e-05, |
| "loss": 0.3349, |
| "step": 15250 |
| }, |
| { |
| "epoch": 8.56, |
| "learning_rate": 2.5965087752406114e-05, |
| "loss": 0.3408, |
| "step": 15260 |
| }, |
| { |
| "epoch": 8.56, |
| "learning_rate": 2.5962257029628233e-05, |
| "loss": 0.3503, |
| "step": 15270 |
| }, |
| { |
| "epoch": 8.57, |
| "learning_rate": 2.595942630685035e-05, |
| "loss": 0.3314, |
| "step": 15280 |
| }, |
| { |
| "epoch": 8.58, |
| "learning_rate": 2.5956595584072468e-05, |
| "loss": 0.3231, |
| "step": 15290 |
| }, |
| { |
| "epoch": 8.58, |
| "learning_rate": 2.5953764861294584e-05, |
| "loss": 0.3449, |
| "step": 15300 |
| }, |
| { |
| "epoch": 8.59, |
| "learning_rate": 2.5950934138516703e-05, |
| "loss": 0.3376, |
| "step": 15310 |
| }, |
| { |
| "epoch": 8.59, |
| "learning_rate": 2.594810341573882e-05, |
| "loss": 0.3576, |
| "step": 15320 |
| }, |
| { |
| "epoch": 8.6, |
| "learning_rate": 2.5945272692960938e-05, |
| "loss": 0.325, |
| "step": 15330 |
| }, |
| { |
| "epoch": 8.6, |
| "learning_rate": 2.5942441970183054e-05, |
| "loss": 0.3261, |
| "step": 15340 |
| }, |
| { |
| "epoch": 8.61, |
| "learning_rate": 2.5939611247405173e-05, |
| "loss": 0.3429, |
| "step": 15350 |
| }, |
| { |
| "epoch": 8.61, |
| "learning_rate": 2.593678052462729e-05, |
| "loss": 0.3739, |
| "step": 15360 |
| }, |
| { |
| "epoch": 8.62, |
| "learning_rate": 2.5933949801849404e-05, |
| "loss": 0.3402, |
| "step": 15370 |
| }, |
| { |
| "epoch": 8.63, |
| "learning_rate": 2.5931119079071524e-05, |
| "loss": 0.3247, |
| "step": 15380 |
| }, |
| { |
| "epoch": 8.63, |
| "learning_rate": 2.5928288356293643e-05, |
| "loss": 0.337, |
| "step": 15390 |
| }, |
| { |
| "epoch": 8.64, |
| "learning_rate": 2.592545763351576e-05, |
| "loss": 0.3288, |
| "step": 15400 |
| }, |
| { |
| "epoch": 8.64, |
| "learning_rate": 2.5922626910737874e-05, |
| "loss": 0.3353, |
| "step": 15410 |
| }, |
| { |
| "epoch": 8.65, |
| "learning_rate": 2.5919796187959994e-05, |
| "loss": 0.3567, |
| "step": 15420 |
| }, |
| { |
| "epoch": 8.65, |
| "learning_rate": 2.5916965465182113e-05, |
| "loss": 0.334, |
| "step": 15430 |
| }, |
| { |
| "epoch": 8.66, |
| "learning_rate": 2.591413474240423e-05, |
| "loss": 0.3309, |
| "step": 15440 |
| }, |
| { |
| "epoch": 8.66, |
| "learning_rate": 2.5911304019626344e-05, |
| "loss": 0.3256, |
| "step": 15450 |
| }, |
| { |
| "epoch": 8.67, |
| "learning_rate": 2.590847329684846e-05, |
| "loss": 0.3486, |
| "step": 15460 |
| }, |
| { |
| "epoch": 8.68, |
| "learning_rate": 2.5905642574070583e-05, |
| "loss": 0.3439, |
| "step": 15470 |
| }, |
| { |
| "epoch": 8.68, |
| "learning_rate": 2.59028118512927e-05, |
| "loss": 0.3373, |
| "step": 15480 |
| }, |
| { |
| "epoch": 8.69, |
| "learning_rate": 2.5899981128514814e-05, |
| "loss": 0.3372, |
| "step": 15490 |
| }, |
| { |
| "epoch": 8.69, |
| "learning_rate": 2.589715040573693e-05, |
| "loss": 0.3414, |
| "step": 15500 |
| }, |
| { |
| "epoch": 8.7, |
| "learning_rate": 2.5894319682959053e-05, |
| "loss": 0.3431, |
| "step": 15510 |
| }, |
| { |
| "epoch": 8.7, |
| "learning_rate": 2.589148896018117e-05, |
| "loss": 0.3397, |
| "step": 15520 |
| }, |
| { |
| "epoch": 8.71, |
| "learning_rate": 2.5888658237403284e-05, |
| "loss": 0.3314, |
| "step": 15530 |
| }, |
| { |
| "epoch": 8.72, |
| "learning_rate": 2.58858275146254e-05, |
| "loss": 0.3538, |
| "step": 15540 |
| }, |
| { |
| "epoch": 8.72, |
| "learning_rate": 2.5882996791847516e-05, |
| "loss": 0.3265, |
| "step": 15550 |
| }, |
| { |
| "epoch": 8.73, |
| "learning_rate": 2.588016606906964e-05, |
| "loss": 0.3563, |
| "step": 15560 |
| }, |
| { |
| "epoch": 8.73, |
| "learning_rate": 2.5877335346291754e-05, |
| "loss": 0.3536, |
| "step": 15570 |
| }, |
| { |
| "epoch": 8.74, |
| "learning_rate": 2.587450462351387e-05, |
| "loss": 0.3327, |
| "step": 15580 |
| }, |
| { |
| "epoch": 8.74, |
| "learning_rate": 2.5871673900735986e-05, |
| "loss": 0.3281, |
| "step": 15590 |
| }, |
| { |
| "epoch": 8.75, |
| "learning_rate": 2.586884317795811e-05, |
| "loss": 0.3261, |
| "step": 15600 |
| }, |
| { |
| "epoch": 8.75, |
| "learning_rate": 2.5866012455180224e-05, |
| "loss": 0.3557, |
| "step": 15610 |
| }, |
| { |
| "epoch": 8.76, |
| "learning_rate": 2.586318173240234e-05, |
| "loss": 0.3315, |
| "step": 15620 |
| }, |
| { |
| "epoch": 8.77, |
| "learning_rate": 2.5860351009624456e-05, |
| "loss": 0.3399, |
| "step": 15630 |
| }, |
| { |
| "epoch": 8.77, |
| "learning_rate": 2.5857520286846575e-05, |
| "loss": 0.322, |
| "step": 15640 |
| }, |
| { |
| "epoch": 8.78, |
| "learning_rate": 2.5854689564068694e-05, |
| "loss": 0.3488, |
| "step": 15650 |
| }, |
| { |
| "epoch": 8.78, |
| "learning_rate": 2.585185884129081e-05, |
| "loss": 0.3374, |
| "step": 15660 |
| }, |
| { |
| "epoch": 8.79, |
| "learning_rate": 2.5849028118512926e-05, |
| "loss": 0.3558, |
| "step": 15670 |
| }, |
| { |
| "epoch": 8.79, |
| "learning_rate": 2.5846197395735045e-05, |
| "loss": 0.3214, |
| "step": 15680 |
| }, |
| { |
| "epoch": 8.8, |
| "learning_rate": 2.5843366672957164e-05, |
| "loss": 0.3215, |
| "step": 15690 |
| }, |
| { |
| "epoch": 8.81, |
| "learning_rate": 2.584053595017928e-05, |
| "loss": 0.3302, |
| "step": 15700 |
| }, |
| { |
| "epoch": 8.81, |
| "learning_rate": 2.5837705227401396e-05, |
| "loss": 0.3448, |
| "step": 15710 |
| }, |
| { |
| "epoch": 8.82, |
| "learning_rate": 2.5834874504623515e-05, |
| "loss": 0.3611, |
| "step": 15720 |
| }, |
| { |
| "epoch": 8.82, |
| "learning_rate": 2.583204378184563e-05, |
| "loss": 0.3457, |
| "step": 15730 |
| }, |
| { |
| "epoch": 8.83, |
| "learning_rate": 2.582921305906775e-05, |
| "loss": 0.3464, |
| "step": 15740 |
| }, |
| { |
| "epoch": 8.83, |
| "learning_rate": 2.5826382336289866e-05, |
| "loss": 0.3418, |
| "step": 15750 |
| }, |
| { |
| "epoch": 8.84, |
| "learning_rate": 2.5823551613511985e-05, |
| "loss": 0.347, |
| "step": 15760 |
| }, |
| { |
| "epoch": 8.84, |
| "learning_rate": 2.58207208907341e-05, |
| "loss": 0.3399, |
| "step": 15770 |
| }, |
| { |
| "epoch": 8.85, |
| "learning_rate": 2.581789016795622e-05, |
| "loss": 0.3353, |
| "step": 15780 |
| }, |
| { |
| "epoch": 8.86, |
| "learning_rate": 2.5815059445178336e-05, |
| "loss": 0.3409, |
| "step": 15790 |
| }, |
| { |
| "epoch": 8.86, |
| "learning_rate": 2.5812228722400455e-05, |
| "loss": 0.3329, |
| "step": 15800 |
| }, |
| { |
| "epoch": 8.87, |
| "learning_rate": 2.580939799962257e-05, |
| "loss": 0.3464, |
| "step": 15810 |
| }, |
| { |
| "epoch": 8.87, |
| "learning_rate": 2.5806567276844687e-05, |
| "loss": 0.3464, |
| "step": 15820 |
| }, |
| { |
| "epoch": 8.88, |
| "learning_rate": 2.5803736554066806e-05, |
| "loss": 0.324, |
| "step": 15830 |
| }, |
| { |
| "epoch": 8.88, |
| "learning_rate": 2.5800905831288925e-05, |
| "loss": 0.3162, |
| "step": 15840 |
| }, |
| { |
| "epoch": 8.89, |
| "learning_rate": 2.579807510851104e-05, |
| "loss": 0.3311, |
| "step": 15850 |
| }, |
| { |
| "epoch": 8.89, |
| "learning_rate": 2.5795244385733156e-05, |
| "loss": 0.3524, |
| "step": 15860 |
| }, |
| { |
| "epoch": 8.9, |
| "learning_rate": 2.5792413662955276e-05, |
| "loss": 0.3537, |
| "step": 15870 |
| }, |
| { |
| "epoch": 8.91, |
| "learning_rate": 2.5789582940177395e-05, |
| "loss": 0.3356, |
| "step": 15880 |
| }, |
| { |
| "epoch": 8.91, |
| "learning_rate": 2.578675221739951e-05, |
| "loss": 0.3443, |
| "step": 15890 |
| }, |
| { |
| "epoch": 8.92, |
| "learning_rate": 2.5783921494621626e-05, |
| "loss": 0.3157, |
| "step": 15900 |
| }, |
| { |
| "epoch": 8.92, |
| "learning_rate": 2.5781090771843742e-05, |
| "loss": 0.3508, |
| "step": 15910 |
| }, |
| { |
| "epoch": 8.93, |
| "learning_rate": 2.5778260049065865e-05, |
| "loss": 0.3498, |
| "step": 15920 |
| }, |
| { |
| "epoch": 8.93, |
| "learning_rate": 2.5775712398565767e-05, |
| "loss": 0.3234, |
| "step": 15930 |
| }, |
| { |
| "epoch": 8.94, |
| "learning_rate": 2.5772881675787886e-05, |
| "loss": 0.3112, |
| "step": 15940 |
| }, |
| { |
| "epoch": 8.95, |
| "learning_rate": 2.5770050953010002e-05, |
| "loss": 0.3243, |
| "step": 15950 |
| }, |
| { |
| "epoch": 8.95, |
| "learning_rate": 2.576722023023212e-05, |
| "loss": 0.3452, |
| "step": 15960 |
| }, |
| { |
| "epoch": 8.96, |
| "learning_rate": 2.5764389507454237e-05, |
| "loss": 0.3573, |
| "step": 15970 |
| }, |
| { |
| "epoch": 8.96, |
| "learning_rate": 2.5761558784676356e-05, |
| "loss": 0.332, |
| "step": 15980 |
| }, |
| { |
| "epoch": 8.97, |
| "learning_rate": 2.5758728061898472e-05, |
| "loss": 0.3295, |
| "step": 15990 |
| }, |
| { |
| "epoch": 8.97, |
| "learning_rate": 2.575589733912059e-05, |
| "loss": 0.3174, |
| "step": 16000 |
| }, |
| { |
| "epoch": 8.98, |
| "learning_rate": 2.5753066616342707e-05, |
| "loss": 0.3429, |
| "step": 16010 |
| }, |
| { |
| "epoch": 8.98, |
| "learning_rate": 2.5750235893564823e-05, |
| "loss": 0.3563, |
| "step": 16020 |
| }, |
| { |
| "epoch": 8.99, |
| "learning_rate": 2.5747405170786942e-05, |
| "loss": 0.3291, |
| "step": 16030 |
| }, |
| { |
| "epoch": 9.0, |
| "learning_rate": 2.574457444800906e-05, |
| "loss": 0.3348, |
| "step": 16040 |
| }, |
| { |
| "epoch": 9.0, |
| "eval_cer": 0.17427635841165476, |
| "eval_loss": 0.4341621696949005, |
| "eval_runtime": 317.7823, |
| "eval_samples_per_second": 16.75, |
| "eval_steps_per_second": 4.188, |
| "eval_wer": 0.23679180546390044, |
| "step": 16047 |
| }, |
| { |
| "epoch": 9.0, |
| "learning_rate": 2.5741743725231177e-05, |
| "loss": 0.3825, |
| "step": 16050 |
| }, |
| { |
| "epoch": 9.01, |
| "learning_rate": 2.5738913002453293e-05, |
| "loss": 0.3182, |
| "step": 16060 |
| }, |
| { |
| "epoch": 9.01, |
| "learning_rate": 2.5736082279675412e-05, |
| "loss": 0.3293, |
| "step": 16070 |
| }, |
| { |
| "epoch": 9.02, |
| "learning_rate": 2.5733251556897528e-05, |
| "loss": 0.3108, |
| "step": 16080 |
| }, |
| { |
| "epoch": 9.02, |
| "learning_rate": 2.5730420834119647e-05, |
| "loss": 0.3297, |
| "step": 16090 |
| }, |
| { |
| "epoch": 9.03, |
| "learning_rate": 2.5727590111341763e-05, |
| "loss": 0.3334, |
| "step": 16100 |
| }, |
| { |
| "epoch": 9.04, |
| "learning_rate": 2.572475938856388e-05, |
| "loss": 0.3244, |
| "step": 16110 |
| }, |
| { |
| "epoch": 9.04, |
| "learning_rate": 2.5721928665785998e-05, |
| "loss": 0.316, |
| "step": 16120 |
| }, |
| { |
| "epoch": 9.05, |
| "learning_rate": 2.5719097943008117e-05, |
| "loss": 0.3192, |
| "step": 16130 |
| }, |
| { |
| "epoch": 9.05, |
| "learning_rate": 2.5716267220230233e-05, |
| "loss": 0.3202, |
| "step": 16140 |
| }, |
| { |
| "epoch": 9.06, |
| "learning_rate": 2.571343649745235e-05, |
| "loss": 0.349, |
| "step": 16150 |
| }, |
| { |
| "epoch": 9.06, |
| "learning_rate": 2.5710605774674468e-05, |
| "loss": 0.3221, |
| "step": 16160 |
| }, |
| { |
| "epoch": 9.07, |
| "learning_rate": 2.5707775051896587e-05, |
| "loss": 0.3183, |
| "step": 16170 |
| }, |
| { |
| "epoch": 9.07, |
| "learning_rate": 2.570522740139649e-05, |
| "loss": 0.3126, |
| "step": 16180 |
| }, |
| { |
| "epoch": 9.08, |
| "learning_rate": 2.5702396678618608e-05, |
| "loss": 0.3281, |
| "step": 16190 |
| }, |
| { |
| "epoch": 9.09, |
| "learning_rate": 2.5699565955840724e-05, |
| "loss": 0.3395, |
| "step": 16200 |
| }, |
| { |
| "epoch": 9.09, |
| "learning_rate": 2.5696735233062843e-05, |
| "loss": 0.3246, |
| "step": 16210 |
| }, |
| { |
| "epoch": 9.1, |
| "learning_rate": 2.569390451028496e-05, |
| "loss": 0.315, |
| "step": 16220 |
| }, |
| { |
| "epoch": 9.1, |
| "learning_rate": 2.5691073787507078e-05, |
| "loss": 0.3237, |
| "step": 16230 |
| }, |
| { |
| "epoch": 9.11, |
| "learning_rate": 2.5688243064729194e-05, |
| "loss": 0.3175, |
| "step": 16240 |
| }, |
| { |
| "epoch": 9.11, |
| "learning_rate": 2.5685412341951313e-05, |
| "loss": 0.3375, |
| "step": 16250 |
| }, |
| { |
| "epoch": 9.12, |
| "learning_rate": 2.568258161917343e-05, |
| "loss": 0.3223, |
| "step": 16260 |
| }, |
| { |
| "epoch": 9.13, |
| "learning_rate": 2.5679750896395545e-05, |
| "loss": 0.306, |
| "step": 16270 |
| }, |
| { |
| "epoch": 9.13, |
| "learning_rate": 2.5676920173617664e-05, |
| "loss": 0.3246, |
| "step": 16280 |
| }, |
| { |
| "epoch": 9.14, |
| "learning_rate": 2.5674089450839783e-05, |
| "loss": 0.3322, |
| "step": 16290 |
| }, |
| { |
| "epoch": 9.14, |
| "learning_rate": 2.56712587280619e-05, |
| "loss": 0.3303, |
| "step": 16300 |
| }, |
| { |
| "epoch": 9.15, |
| "learning_rate": 2.5668428005284015e-05, |
| "loss": 0.3112, |
| "step": 16310 |
| }, |
| { |
| "epoch": 9.15, |
| "learning_rate": 2.5665597282506134e-05, |
| "loss": 0.3265, |
| "step": 16320 |
| }, |
| { |
| "epoch": 9.16, |
| "learning_rate": 2.5662766559728253e-05, |
| "loss": 0.3269, |
| "step": 16330 |
| }, |
| { |
| "epoch": 9.16, |
| "learning_rate": 2.565993583695037e-05, |
| "loss": 0.3181, |
| "step": 16340 |
| }, |
| { |
| "epoch": 9.17, |
| "learning_rate": 2.5657105114172485e-05, |
| "loss": 0.336, |
| "step": 16350 |
| }, |
| { |
| "epoch": 9.18, |
| "learning_rate": 2.56542743913946e-05, |
| "loss": 0.3292, |
| "step": 16360 |
| }, |
| { |
| "epoch": 9.18, |
| "learning_rate": 2.5651443668616723e-05, |
| "loss": 0.3196, |
| "step": 16370 |
| }, |
| { |
| "epoch": 9.19, |
| "learning_rate": 2.564861294583884e-05, |
| "loss": 0.317, |
| "step": 16380 |
| }, |
| { |
| "epoch": 9.19, |
| "learning_rate": 2.5645782223060955e-05, |
| "loss": 0.3186, |
| "step": 16390 |
| }, |
| { |
| "epoch": 9.2, |
| "learning_rate": 2.564295150028307e-05, |
| "loss": 0.3339, |
| "step": 16400 |
| }, |
| { |
| "epoch": 9.2, |
| "learning_rate": 2.5640120777505193e-05, |
| "loss": 0.3175, |
| "step": 16410 |
| }, |
| { |
| "epoch": 9.21, |
| "learning_rate": 2.563729005472731e-05, |
| "loss": 0.3252, |
| "step": 16420 |
| }, |
| { |
| "epoch": 9.21, |
| "learning_rate": 2.5634459331949425e-05, |
| "loss": 0.3164, |
| "step": 16430 |
| }, |
| { |
| "epoch": 9.22, |
| "learning_rate": 2.563162860917154e-05, |
| "loss": 0.3189, |
| "step": 16440 |
| }, |
| { |
| "epoch": 9.23, |
| "learning_rate": 2.562879788639366e-05, |
| "loss": 0.3411, |
| "step": 16450 |
| }, |
| { |
| "epoch": 9.23, |
| "learning_rate": 2.562596716361578e-05, |
| "loss": 0.3058, |
| "step": 16460 |
| }, |
| { |
| "epoch": 9.24, |
| "learning_rate": 2.5623136440837895e-05, |
| "loss": 0.3174, |
| "step": 16470 |
| }, |
| { |
| "epoch": 9.24, |
| "learning_rate": 2.562030571806001e-05, |
| "loss": 0.321, |
| "step": 16480 |
| }, |
| { |
| "epoch": 9.25, |
| "learning_rate": 2.561747499528213e-05, |
| "loss": 0.3364, |
| "step": 16490 |
| }, |
| { |
| "epoch": 9.25, |
| "learning_rate": 2.561464427250425e-05, |
| "loss": 0.3495, |
| "step": 16500 |
| }, |
| { |
| "epoch": 9.26, |
| "learning_rate": 2.5611813549726365e-05, |
| "loss": 0.3102, |
| "step": 16510 |
| }, |
| { |
| "epoch": 9.27, |
| "learning_rate": 2.560898282694848e-05, |
| "loss": 0.307, |
| "step": 16520 |
| }, |
| { |
| "epoch": 9.27, |
| "learning_rate": 2.56061521041706e-05, |
| "loss": 0.3128, |
| "step": 16530 |
| }, |
| { |
| "epoch": 9.28, |
| "learning_rate": 2.5603321381392715e-05, |
| "loss": 0.3458, |
| "step": 16540 |
| }, |
| { |
| "epoch": 9.28, |
| "learning_rate": 2.5600490658614835e-05, |
| "loss": 0.3332, |
| "step": 16550 |
| }, |
| { |
| "epoch": 9.29, |
| "learning_rate": 2.559765993583695e-05, |
| "loss": 0.3224, |
| "step": 16560 |
| }, |
| { |
| "epoch": 9.29, |
| "learning_rate": 2.559482921305907e-05, |
| "loss": 0.3208, |
| "step": 16570 |
| }, |
| { |
| "epoch": 9.3, |
| "learning_rate": 2.5591998490281185e-05, |
| "loss": 0.313, |
| "step": 16580 |
| }, |
| { |
| "epoch": 9.3, |
| "learning_rate": 2.5589167767503305e-05, |
| "loss": 0.3272, |
| "step": 16590 |
| }, |
| { |
| "epoch": 9.31, |
| "learning_rate": 2.558633704472542e-05, |
| "loss": 0.3352, |
| "step": 16600 |
| }, |
| { |
| "epoch": 9.32, |
| "learning_rate": 2.558350632194754e-05, |
| "loss": 0.3337, |
| "step": 16610 |
| }, |
| { |
| "epoch": 9.32, |
| "learning_rate": 2.5580675599169655e-05, |
| "loss": 0.3295, |
| "step": 16620 |
| }, |
| { |
| "epoch": 9.33, |
| "learning_rate": 2.557784487639177e-05, |
| "loss": 0.3214, |
| "step": 16630 |
| }, |
| { |
| "epoch": 9.33, |
| "learning_rate": 2.557501415361389e-05, |
| "loss": 0.3259, |
| "step": 16640 |
| }, |
| { |
| "epoch": 9.34, |
| "learning_rate": 2.5572183430836006e-05, |
| "loss": 0.3518, |
| "step": 16650 |
| }, |
| { |
| "epoch": 9.34, |
| "learning_rate": 2.5569352708058125e-05, |
| "loss": 0.3248, |
| "step": 16660 |
| }, |
| { |
| "epoch": 9.35, |
| "learning_rate": 2.556652198528024e-05, |
| "loss": 0.3203, |
| "step": 16670 |
| }, |
| { |
| "epoch": 9.35, |
| "learning_rate": 2.556369126250236e-05, |
| "loss": 0.3104, |
| "step": 16680 |
| }, |
| { |
| "epoch": 9.36, |
| "learning_rate": 2.5560860539724476e-05, |
| "loss": 0.3177, |
| "step": 16690 |
| }, |
| { |
| "epoch": 9.37, |
| "learning_rate": 2.5558029816946595e-05, |
| "loss": 0.3308, |
| "step": 16700 |
| }, |
| { |
| "epoch": 9.37, |
| "learning_rate": 2.555519909416871e-05, |
| "loss": 0.3217, |
| "step": 16710 |
| }, |
| { |
| "epoch": 9.38, |
| "learning_rate": 2.5552368371390827e-05, |
| "loss": 0.3356, |
| "step": 16720 |
| }, |
| { |
| "epoch": 9.38, |
| "learning_rate": 2.5549537648612946e-05, |
| "loss": 0.3314, |
| "step": 16730 |
| }, |
| { |
| "epoch": 9.39, |
| "learning_rate": 2.5546706925835065e-05, |
| "loss": 0.3329, |
| "step": 16740 |
| }, |
| { |
| "epoch": 9.39, |
| "learning_rate": 2.554387620305718e-05, |
| "loss": 0.3478, |
| "step": 16750 |
| }, |
| { |
| "epoch": 9.4, |
| "learning_rate": 2.5541045480279297e-05, |
| "loss": 0.314, |
| "step": 16760 |
| }, |
| { |
| "epoch": 9.41, |
| "learning_rate": 2.5538214757501416e-05, |
| "loss": 0.3073, |
| "step": 16770 |
| }, |
| { |
| "epoch": 9.41, |
| "learning_rate": 2.5535384034723535e-05, |
| "loss": 0.3216, |
| "step": 16780 |
| }, |
| { |
| "epoch": 9.42, |
| "learning_rate": 2.553255331194565e-05, |
| "loss": 0.3222, |
| "step": 16790 |
| }, |
| { |
| "epoch": 9.42, |
| "learning_rate": 2.5529722589167767e-05, |
| "loss": 0.3306, |
| "step": 16800 |
| }, |
| { |
| "epoch": 9.43, |
| "learning_rate": 2.5526891866389883e-05, |
| "loss": 0.341, |
| "step": 16810 |
| }, |
| { |
| "epoch": 9.43, |
| "learning_rate": 2.5524061143612005e-05, |
| "loss": 0.3214, |
| "step": 16820 |
| }, |
| { |
| "epoch": 9.44, |
| "learning_rate": 2.552123042083412e-05, |
| "loss": 0.3133, |
| "step": 16830 |
| }, |
| { |
| "epoch": 9.44, |
| "learning_rate": 2.5518399698056237e-05, |
| "loss": 0.3163, |
| "step": 16840 |
| }, |
| { |
| "epoch": 9.45, |
| "learning_rate": 2.5515568975278353e-05, |
| "loss": 0.3219, |
| "step": 16850 |
| }, |
| { |
| "epoch": 9.46, |
| "learning_rate": 2.5512738252500475e-05, |
| "loss": 0.3265, |
| "step": 16860 |
| }, |
| { |
| "epoch": 9.46, |
| "learning_rate": 2.550990752972259e-05, |
| "loss": 0.3147, |
| "step": 16870 |
| }, |
| { |
| "epoch": 9.47, |
| "learning_rate": 2.5507076806944707e-05, |
| "loss": 0.3171, |
| "step": 16880 |
| }, |
| { |
| "epoch": 9.47, |
| "learning_rate": 2.5504246084166823e-05, |
| "loss": 0.3275, |
| "step": 16890 |
| }, |
| { |
| "epoch": 9.48, |
| "learning_rate": 2.5501415361388942e-05, |
| "loss": 0.3425, |
| "step": 16900 |
| }, |
| { |
| "epoch": 9.48, |
| "learning_rate": 2.549858463861106e-05, |
| "loss": 0.3258, |
| "step": 16910 |
| }, |
| { |
| "epoch": 9.49, |
| "learning_rate": 2.5495753915833177e-05, |
| "loss": 0.3155, |
| "step": 16920 |
| }, |
| { |
| "epoch": 9.5, |
| "learning_rate": 2.5492923193055293e-05, |
| "loss": 0.3169, |
| "step": 16930 |
| }, |
| { |
| "epoch": 9.5, |
| "learning_rate": 2.5490092470277412e-05, |
| "loss": 0.3243, |
| "step": 16940 |
| }, |
| { |
| "epoch": 9.51, |
| "learning_rate": 2.548726174749953e-05, |
| "loss": 0.3085, |
| "step": 16950 |
| }, |
| { |
| "epoch": 9.51, |
| "learning_rate": 2.5484431024721647e-05, |
| "loss": 0.3147, |
| "step": 16960 |
| }, |
| { |
| "epoch": 9.52, |
| "learning_rate": 2.5481600301943763e-05, |
| "loss": 0.3143, |
| "step": 16970 |
| }, |
| { |
| "epoch": 9.52, |
| "learning_rate": 2.5478769579165882e-05, |
| "loss": 0.3179, |
| "step": 16980 |
| }, |
| { |
| "epoch": 9.53, |
| "learning_rate": 2.5475938856387998e-05, |
| "loss": 0.3291, |
| "step": 16990 |
| }, |
| { |
| "epoch": 9.53, |
| "learning_rate": 2.5473108133610117e-05, |
| "loss": 0.3476, |
| "step": 17000 |
| }, |
| { |
| "epoch": 9.54, |
| "learning_rate": 2.5470277410832233e-05, |
| "loss": 0.3205, |
| "step": 17010 |
| }, |
| { |
| "epoch": 9.55, |
| "learning_rate": 2.5467446688054352e-05, |
| "loss": 0.3209, |
| "step": 17020 |
| }, |
| { |
| "epoch": 9.55, |
| "learning_rate": 2.5464615965276468e-05, |
| "loss": 0.3146, |
| "step": 17030 |
| }, |
| { |
| "epoch": 9.56, |
| "learning_rate": 2.5461785242498587e-05, |
| "loss": 0.3282, |
| "step": 17040 |
| }, |
| { |
| "epoch": 9.56, |
| "learning_rate": 2.5458954519720703e-05, |
| "loss": 0.3308, |
| "step": 17050 |
| }, |
| { |
| "epoch": 9.57, |
| "learning_rate": 2.545612379694282e-05, |
| "loss": 0.3263, |
| "step": 17060 |
| }, |
| { |
| "epoch": 9.57, |
| "learning_rate": 2.5453293074164938e-05, |
| "loss": 0.3229, |
| "step": 17070 |
| }, |
| { |
| "epoch": 9.58, |
| "learning_rate": 2.5450462351387053e-05, |
| "loss": 0.3091, |
| "step": 17080 |
| }, |
| { |
| "epoch": 9.58, |
| "learning_rate": 2.5447631628609173e-05, |
| "loss": 0.3115, |
| "step": 17090 |
| }, |
| { |
| "epoch": 9.59, |
| "learning_rate": 2.544480090583129e-05, |
| "loss": 0.3589, |
| "step": 17100 |
| }, |
| { |
| "epoch": 9.6, |
| "learning_rate": 2.5441970183053408e-05, |
| "loss": 0.3125, |
| "step": 17110 |
| }, |
| { |
| "epoch": 9.6, |
| "learning_rate": 2.5439139460275523e-05, |
| "loss": 0.3196, |
| "step": 17120 |
| }, |
| { |
| "epoch": 9.61, |
| "learning_rate": 2.5436308737497643e-05, |
| "loss": 0.327, |
| "step": 17130 |
| }, |
| { |
| "epoch": 9.61, |
| "learning_rate": 2.543347801471976e-05, |
| "loss": 0.33, |
| "step": 17140 |
| }, |
| { |
| "epoch": 9.62, |
| "learning_rate": 2.5430647291941878e-05, |
| "loss": 0.332, |
| "step": 17150 |
| }, |
| { |
| "epoch": 9.62, |
| "learning_rate": 2.5427816569163993e-05, |
| "loss": 0.3284, |
| "step": 17160 |
| }, |
| { |
| "epoch": 9.63, |
| "learning_rate": 2.542498584638611e-05, |
| "loss": 0.316, |
| "step": 17170 |
| }, |
| { |
| "epoch": 9.64, |
| "learning_rate": 2.542215512360823e-05, |
| "loss": 0.3133, |
| "step": 17180 |
| }, |
| { |
| "epoch": 9.64, |
| "learning_rate": 2.5419324400830348e-05, |
| "loss": 0.3205, |
| "step": 17190 |
| }, |
| { |
| "epoch": 9.65, |
| "learning_rate": 2.5416493678052463e-05, |
| "loss": 0.3268, |
| "step": 17200 |
| }, |
| { |
| "epoch": 9.65, |
| "learning_rate": 2.541366295527458e-05, |
| "loss": 0.2987, |
| "step": 17210 |
| }, |
| { |
| "epoch": 9.66, |
| "learning_rate": 2.54108322324967e-05, |
| "loss": 0.3107, |
| "step": 17220 |
| }, |
| { |
| "epoch": 9.66, |
| "learning_rate": 2.5408001509718818e-05, |
| "loss": 0.3187, |
| "step": 17230 |
| }, |
| { |
| "epoch": 9.67, |
| "learning_rate": 2.5405170786940933e-05, |
| "loss": 0.3299, |
| "step": 17240 |
| }, |
| { |
| "epoch": 9.67, |
| "learning_rate": 2.540234006416305e-05, |
| "loss": 0.334, |
| "step": 17250 |
| }, |
| { |
| "epoch": 9.68, |
| "learning_rate": 2.5399509341385165e-05, |
| "loss": 0.325, |
| "step": 17260 |
| }, |
| { |
| "epoch": 9.69, |
| "learning_rate": 2.5396678618607287e-05, |
| "loss": 0.3151, |
| "step": 17270 |
| }, |
| { |
| "epoch": 9.69, |
| "learning_rate": 2.5393847895829403e-05, |
| "loss": 0.3094, |
| "step": 17280 |
| }, |
| { |
| "epoch": 9.7, |
| "learning_rate": 2.539101717305152e-05, |
| "loss": 0.3317, |
| "step": 17290 |
| }, |
| { |
| "epoch": 9.7, |
| "learning_rate": 2.5388186450273635e-05, |
| "loss": 0.3379, |
| "step": 17300 |
| }, |
| { |
| "epoch": 9.71, |
| "learning_rate": 2.5385355727495757e-05, |
| "loss": 0.3322, |
| "step": 17310 |
| }, |
| { |
| "epoch": 9.71, |
| "learning_rate": 2.5382525004717873e-05, |
| "loss": 0.3134, |
| "step": 17320 |
| }, |
| { |
| "epoch": 9.72, |
| "learning_rate": 2.537969428193999e-05, |
| "loss": 0.326, |
| "step": 17330 |
| }, |
| { |
| "epoch": 9.72, |
| "learning_rate": 2.5376863559162105e-05, |
| "loss": 0.324, |
| "step": 17340 |
| }, |
| { |
| "epoch": 9.73, |
| "learning_rate": 2.5374032836384224e-05, |
| "loss": 0.3308, |
| "step": 17350 |
| }, |
| { |
| "epoch": 9.74, |
| "learning_rate": 2.5371202113606343e-05, |
| "loss": 0.3291, |
| "step": 17360 |
| }, |
| { |
| "epoch": 9.74, |
| "learning_rate": 2.536837139082846e-05, |
| "loss": 0.3102, |
| "step": 17370 |
| }, |
| { |
| "epoch": 9.75, |
| "learning_rate": 2.5365540668050575e-05, |
| "loss": 0.3124, |
| "step": 17380 |
| }, |
| { |
| "epoch": 9.75, |
| "learning_rate": 2.5362709945272694e-05, |
| "loss": 0.3304, |
| "step": 17390 |
| }, |
| { |
| "epoch": 9.76, |
| "learning_rate": 2.5359879222494813e-05, |
| "loss": 0.3424, |
| "step": 17400 |
| }, |
| { |
| "epoch": 9.76, |
| "learning_rate": 2.535704849971693e-05, |
| "loss": 0.3139, |
| "step": 17410 |
| }, |
| { |
| "epoch": 9.77, |
| "learning_rate": 2.5354217776939045e-05, |
| "loss": 0.3173, |
| "step": 17420 |
| }, |
| { |
| "epoch": 9.78, |
| "learning_rate": 2.5351387054161164e-05, |
| "loss": 0.3273, |
| "step": 17430 |
| }, |
| { |
| "epoch": 9.78, |
| "learning_rate": 2.534855633138328e-05, |
| "loss": 0.3336, |
| "step": 17440 |
| }, |
| { |
| "epoch": 9.79, |
| "learning_rate": 2.53457256086054e-05, |
| "loss": 0.3377, |
| "step": 17450 |
| }, |
| { |
| "epoch": 9.79, |
| "learning_rate": 2.5342894885827515e-05, |
| "loss": 0.3176, |
| "step": 17460 |
| }, |
| { |
| "epoch": 9.8, |
| "learning_rate": 2.534006416304963e-05, |
| "loss": 0.3207, |
| "step": 17470 |
| }, |
| { |
| "epoch": 9.8, |
| "learning_rate": 2.533723344027175e-05, |
| "loss": 0.3145, |
| "step": 17480 |
| }, |
| { |
| "epoch": 9.81, |
| "learning_rate": 2.533440271749387e-05, |
| "loss": 0.3358, |
| "step": 17490 |
| }, |
| { |
| "epoch": 9.81, |
| "learning_rate": 2.5331571994715985e-05, |
| "loss": 0.3592, |
| "step": 17500 |
| }, |
| { |
| "epoch": 9.82, |
| "learning_rate": 2.53287412719381e-05, |
| "loss": 0.3165, |
| "step": 17510 |
| }, |
| { |
| "epoch": 9.83, |
| "learning_rate": 2.532591054916022e-05, |
| "loss": 0.3218, |
| "step": 17520 |
| }, |
| { |
| "epoch": 9.83, |
| "learning_rate": 2.5323079826382336e-05, |
| "loss": 0.3216, |
| "step": 17530 |
| }, |
| { |
| "epoch": 9.84, |
| "learning_rate": 2.5320249103604455e-05, |
| "loss": 0.3201, |
| "step": 17540 |
| }, |
| { |
| "epoch": 9.84, |
| "learning_rate": 2.531741838082657e-05, |
| "loss": 0.345, |
| "step": 17550 |
| }, |
| { |
| "epoch": 9.85, |
| "learning_rate": 2.531458765804869e-05, |
| "loss": 0.316, |
| "step": 17560 |
| }, |
| { |
| "epoch": 9.85, |
| "learning_rate": 2.5311756935270806e-05, |
| "loss": 0.3167, |
| "step": 17570 |
| }, |
| { |
| "epoch": 9.86, |
| "learning_rate": 2.5308926212492925e-05, |
| "loss": 0.3194, |
| "step": 17580 |
| }, |
| { |
| "epoch": 9.87, |
| "learning_rate": 2.530609548971504e-05, |
| "loss": 0.3356, |
| "step": 17590 |
| }, |
| { |
| "epoch": 9.87, |
| "learning_rate": 2.530326476693716e-05, |
| "loss": 0.3336, |
| "step": 17600 |
| }, |
| { |
| "epoch": 9.88, |
| "learning_rate": 2.5300434044159276e-05, |
| "loss": 0.2998, |
| "step": 17610 |
| }, |
| { |
| "epoch": 9.88, |
| "learning_rate": 2.529760332138139e-05, |
| "loss": 0.3116, |
| "step": 17620 |
| }, |
| { |
| "epoch": 9.89, |
| "learning_rate": 2.529477259860351e-05, |
| "loss": 0.322, |
| "step": 17630 |
| }, |
| { |
| "epoch": 9.89, |
| "learning_rate": 2.529194187582563e-05, |
| "loss": 0.3302, |
| "step": 17640 |
| }, |
| { |
| "epoch": 9.9, |
| "learning_rate": 2.5289111153047746e-05, |
| "loss": 0.3326, |
| "step": 17650 |
| }, |
| { |
| "epoch": 9.9, |
| "learning_rate": 2.528628043026986e-05, |
| "loss": 0.3242, |
| "step": 17660 |
| }, |
| { |
| "epoch": 9.91, |
| "learning_rate": 2.528344970749198e-05, |
| "loss": 0.3051, |
| "step": 17670 |
| }, |
| { |
| "epoch": 9.92, |
| "learning_rate": 2.52806189847141e-05, |
| "loss": 0.3266, |
| "step": 17680 |
| }, |
| { |
| "epoch": 9.92, |
| "learning_rate": 2.5277788261936215e-05, |
| "loss": 0.3284, |
| "step": 17690 |
| }, |
| { |
| "epoch": 9.93, |
| "learning_rate": 2.527495753915833e-05, |
| "loss": 0.3278, |
| "step": 17700 |
| }, |
| { |
| "epoch": 9.93, |
| "learning_rate": 2.5272126816380447e-05, |
| "loss": 0.3179, |
| "step": 17710 |
| }, |
| { |
| "epoch": 9.94, |
| "learning_rate": 2.526929609360257e-05, |
| "loss": 0.3228, |
| "step": 17720 |
| }, |
| { |
| "epoch": 9.94, |
| "learning_rate": 2.5266465370824685e-05, |
| "loss": 0.3085, |
| "step": 17730 |
| }, |
| { |
| "epoch": 9.95, |
| "learning_rate": 2.52636346480468e-05, |
| "loss": 0.3199, |
| "step": 17740 |
| }, |
| { |
| "epoch": 9.95, |
| "learning_rate": 2.5260803925268917e-05, |
| "loss": 0.3343, |
| "step": 17750 |
| }, |
| { |
| "epoch": 9.96, |
| "learning_rate": 2.525797320249104e-05, |
| "loss": 0.3161, |
| "step": 17760 |
| }, |
| { |
| "epoch": 9.97, |
| "learning_rate": 2.5255142479713155e-05, |
| "loss": 0.3152, |
| "step": 17770 |
| }, |
| { |
| "epoch": 9.97, |
| "learning_rate": 2.525231175693527e-05, |
| "loss": 0.3031, |
| "step": 17780 |
| }, |
| { |
| "epoch": 9.98, |
| "learning_rate": 2.5249481034157387e-05, |
| "loss": 0.3258, |
| "step": 17790 |
| }, |
| { |
| "epoch": 9.98, |
| "learning_rate": 2.5246650311379506e-05, |
| "loss": 0.3359, |
| "step": 17800 |
| }, |
| { |
| "epoch": 9.99, |
| "learning_rate": 2.5243819588601625e-05, |
| "loss": 0.3266, |
| "step": 17810 |
| }, |
| { |
| "epoch": 9.99, |
| "learning_rate": 2.524098886582374e-05, |
| "loss": 0.3102, |
| "step": 17820 |
| }, |
| { |
| "epoch": 10.0, |
| "learning_rate": 2.5238158143045857e-05, |
| "loss": 0.3324, |
| "step": 17830 |
| }, |
| { |
| "epoch": 10.0, |
| "eval_cer": 0.17398120286096688, |
| "eval_loss": 0.42743468284606934, |
| "eval_runtime": 315.5569, |
| "eval_samples_per_second": 16.869, |
| "eval_steps_per_second": 4.218, |
| "eval_wer": 0.23444660326908567, |
| "step": 17830 |
| }, |
| { |
| "epoch": 10.01, |
| "learning_rate": 2.5235327420267976e-05, |
| "loss": 0.3313, |
| "step": 17840 |
| }, |
| { |
| "epoch": 10.01, |
| "learning_rate": 2.5232496697490095e-05, |
| "loss": 0.3142, |
| "step": 17850 |
| }, |
| { |
| "epoch": 10.02, |
| "learning_rate": 2.522966597471221e-05, |
| "loss": 0.3062, |
| "step": 17860 |
| }, |
| { |
| "epoch": 10.02, |
| "learning_rate": 2.5226835251934327e-05, |
| "loss": 0.3118, |
| "step": 17870 |
| }, |
| { |
| "epoch": 10.03, |
| "learning_rate": 2.5224004529156443e-05, |
| "loss": 0.3264, |
| "step": 17880 |
| }, |
| { |
| "epoch": 10.03, |
| "learning_rate": 2.5221173806378562e-05, |
| "loss": 0.315, |
| "step": 17890 |
| }, |
| { |
| "epoch": 10.04, |
| "learning_rate": 2.521834308360068e-05, |
| "loss": 0.3125, |
| "step": 17900 |
| }, |
| { |
| "epoch": 10.04, |
| "learning_rate": 2.5215512360822797e-05, |
| "loss": 0.3158, |
| "step": 17910 |
| }, |
| { |
| "epoch": 10.05, |
| "learning_rate": 2.5212681638044913e-05, |
| "loss": 0.3214, |
| "step": 17920 |
| }, |
| { |
| "epoch": 10.06, |
| "learning_rate": 2.5209850915267032e-05, |
| "loss": 0.3168, |
| "step": 17930 |
| }, |
| { |
| "epoch": 10.06, |
| "learning_rate": 2.520702019248915e-05, |
| "loss": 0.3128, |
| "step": 17940 |
| }, |
| { |
| "epoch": 10.07, |
| "learning_rate": 2.5204189469711267e-05, |
| "loss": 0.3077, |
| "step": 17950 |
| }, |
| { |
| "epoch": 10.07, |
| "learning_rate": 2.5201358746933383e-05, |
| "loss": 0.3067, |
| "step": 17960 |
| }, |
| { |
| "epoch": 10.08, |
| "learning_rate": 2.5198528024155502e-05, |
| "loss": 0.2977, |
| "step": 17970 |
| }, |
| { |
| "epoch": 10.08, |
| "learning_rate": 2.5195697301377618e-05, |
| "loss": 0.3367, |
| "step": 17980 |
| }, |
| { |
| "epoch": 10.09, |
| "learning_rate": 2.5192866578599737e-05, |
| "loss": 0.3096, |
| "step": 17990 |
| }, |
| { |
| "epoch": 10.1, |
| "learning_rate": 2.5190035855821853e-05, |
| "loss": 0.3095, |
| "step": 18000 |
| }, |
| { |
| "epoch": 10.1, |
| "learning_rate": 2.5187205133043972e-05, |
| "loss": 0.3248, |
| "step": 18010 |
| }, |
| { |
| "epoch": 10.11, |
| "learning_rate": 2.5184374410266088e-05, |
| "loss": 0.2967, |
| "step": 18020 |
| }, |
| { |
| "epoch": 10.11, |
| "learning_rate": 2.5181543687488207e-05, |
| "loss": 0.3124, |
| "step": 18030 |
| }, |
| { |
| "epoch": 10.12, |
| "learning_rate": 2.5178712964710323e-05, |
| "loss": 0.3027, |
| "step": 18040 |
| }, |
| { |
| "epoch": 10.12, |
| "learning_rate": 2.5175882241932442e-05, |
| "loss": 0.3079, |
| "step": 18050 |
| }, |
| { |
| "epoch": 10.13, |
| "learning_rate": 2.5173051519154558e-05, |
| "loss": 0.3023, |
| "step": 18060 |
| }, |
| { |
| "epoch": 10.13, |
| "learning_rate": 2.5170220796376673e-05, |
| "loss": 0.2908, |
| "step": 18070 |
| }, |
| { |
| "epoch": 10.14, |
| "learning_rate": 2.5167390073598793e-05, |
| "loss": 0.3213, |
| "step": 18080 |
| }, |
| { |
| "epoch": 10.15, |
| "learning_rate": 2.5164559350820912e-05, |
| "loss": 0.3196, |
| "step": 18090 |
| }, |
| { |
| "epoch": 10.15, |
| "learning_rate": 2.5161728628043028e-05, |
| "loss": 0.3196, |
| "step": 18100 |
| }, |
| { |
| "epoch": 10.16, |
| "learning_rate": 2.5158897905265143e-05, |
| "loss": 0.3016, |
| "step": 18110 |
| }, |
| { |
| "epoch": 10.16, |
| "learning_rate": 2.5156067182487263e-05, |
| "loss": 0.3167, |
| "step": 18120 |
| }, |
| { |
| "epoch": 10.17, |
| "learning_rate": 2.5153236459709382e-05, |
| "loss": 0.3118, |
| "step": 18130 |
| }, |
| { |
| "epoch": 10.17, |
| "learning_rate": 2.5150405736931498e-05, |
| "loss": 0.3053, |
| "step": 18140 |
| }, |
| { |
| "epoch": 10.18, |
| "learning_rate": 2.5147575014153613e-05, |
| "loss": 0.3057, |
| "step": 18150 |
| }, |
| { |
| "epoch": 10.19, |
| "learning_rate": 2.514474429137573e-05, |
| "loss": 0.2929, |
| "step": 18160 |
| }, |
| { |
| "epoch": 10.19, |
| "learning_rate": 2.5141913568597852e-05, |
| "loss": 0.3003, |
| "step": 18170 |
| }, |
| { |
| "epoch": 10.2, |
| "learning_rate": 2.5139082845819968e-05, |
| "loss": 0.3013, |
| "step": 18180 |
| }, |
| { |
| "epoch": 10.2, |
| "learning_rate": 2.5136252123042083e-05, |
| "loss": 0.3018, |
| "step": 18190 |
| }, |
| { |
| "epoch": 10.21, |
| "learning_rate": 2.51334214002642e-05, |
| "loss": 0.2973, |
| "step": 18200 |
| }, |
| { |
| "epoch": 10.21, |
| "learning_rate": 2.5130590677486322e-05, |
| "loss": 0.3003, |
| "step": 18210 |
| }, |
| { |
| "epoch": 10.22, |
| "learning_rate": 2.5127759954708438e-05, |
| "loss": 0.3011, |
| "step": 18220 |
| }, |
| { |
| "epoch": 10.22, |
| "learning_rate": 2.5124929231930553e-05, |
| "loss": 0.326, |
| "step": 18230 |
| }, |
| { |
| "epoch": 10.23, |
| "learning_rate": 2.512209850915267e-05, |
| "loss": 0.3051, |
| "step": 18240 |
| }, |
| { |
| "epoch": 10.24, |
| "learning_rate": 2.511926778637479e-05, |
| "loss": 0.2939, |
| "step": 18250 |
| }, |
| { |
| "epoch": 10.24, |
| "learning_rate": 2.5116437063596908e-05, |
| "loss": 0.3208, |
| "step": 18260 |
| }, |
| { |
| "epoch": 10.25, |
| "learning_rate": 2.5113606340819023e-05, |
| "loss": 0.2989, |
| "step": 18270 |
| }, |
| { |
| "epoch": 10.25, |
| "learning_rate": 2.511077561804114e-05, |
| "loss": 0.3341, |
| "step": 18280 |
| }, |
| { |
| "epoch": 10.26, |
| "learning_rate": 2.5107944895263255e-05, |
| "loss": 0.3218, |
| "step": 18290 |
| }, |
| { |
| "epoch": 10.26, |
| "learning_rate": 2.5105114172485378e-05, |
| "loss": 0.3061, |
| "step": 18300 |
| }, |
| { |
| "epoch": 10.27, |
| "learning_rate": 2.5102283449707493e-05, |
| "loss": 0.315, |
| "step": 18310 |
| }, |
| { |
| "epoch": 10.27, |
| "learning_rate": 2.509945272692961e-05, |
| "loss": 0.3178, |
| "step": 18320 |
| }, |
| { |
| "epoch": 10.28, |
| "learning_rate": 2.5096622004151725e-05, |
| "loss": 0.3404, |
| "step": 18330 |
| }, |
| { |
| "epoch": 10.29, |
| "learning_rate": 2.5093791281373844e-05, |
| "loss": 0.2934, |
| "step": 18340 |
| }, |
| { |
| "epoch": 10.29, |
| "learning_rate": 2.5090960558595963e-05, |
| "loss": 0.3012, |
| "step": 18350 |
| }, |
| { |
| "epoch": 10.3, |
| "learning_rate": 2.508812983581808e-05, |
| "loss": 0.3088, |
| "step": 18360 |
| }, |
| { |
| "epoch": 10.3, |
| "learning_rate": 2.5085299113040195e-05, |
| "loss": 0.3066, |
| "step": 18370 |
| }, |
| { |
| "epoch": 10.31, |
| "learning_rate": 2.5082468390262314e-05, |
| "loss": 0.3003, |
| "step": 18380 |
| }, |
| { |
| "epoch": 10.31, |
| "learning_rate": 2.5079637667484433e-05, |
| "loss": 0.3175, |
| "step": 18390 |
| }, |
| { |
| "epoch": 10.32, |
| "learning_rate": 2.507680694470655e-05, |
| "loss": 0.3103, |
| "step": 18400 |
| }, |
| { |
| "epoch": 10.33, |
| "learning_rate": 2.5073976221928665e-05, |
| "loss": 0.3031, |
| "step": 18410 |
| }, |
| { |
| "epoch": 10.33, |
| "learning_rate": 2.5071145499150784e-05, |
| "loss": 0.2958, |
| "step": 18420 |
| }, |
| { |
| "epoch": 10.34, |
| "learning_rate": 2.50683147763729e-05, |
| "loss": 0.3093, |
| "step": 18430 |
| }, |
| { |
| "epoch": 10.34, |
| "learning_rate": 2.506548405359502e-05, |
| "loss": 0.3182, |
| "step": 18440 |
| }, |
| { |
| "epoch": 10.35, |
| "learning_rate": 2.5062653330817135e-05, |
| "loss": 0.3037, |
| "step": 18450 |
| }, |
| { |
| "epoch": 10.35, |
| "learning_rate": 2.5059822608039254e-05, |
| "loss": 0.3008, |
| "step": 18460 |
| }, |
| { |
| "epoch": 10.36, |
| "learning_rate": 2.505699188526137e-05, |
| "loss": 0.2952, |
| "step": 18470 |
| }, |
| { |
| "epoch": 10.36, |
| "learning_rate": 2.505416116248349e-05, |
| "loss": 0.3138, |
| "step": 18480 |
| }, |
| { |
| "epoch": 10.37, |
| "learning_rate": 2.5051330439705605e-05, |
| "loss": 0.3131, |
| "step": 18490 |
| }, |
| { |
| "epoch": 10.38, |
| "learning_rate": 2.5048499716927724e-05, |
| "loss": 0.3196, |
| "step": 18500 |
| }, |
| { |
| "epoch": 10.38, |
| "learning_rate": 2.504566899414984e-05, |
| "loss": 0.304, |
| "step": 18510 |
| }, |
| { |
| "epoch": 10.39, |
| "learning_rate": 2.5042838271371956e-05, |
| "loss": 0.3104, |
| "step": 18520 |
| }, |
| { |
| "epoch": 10.39, |
| "learning_rate": 2.5040007548594075e-05, |
| "loss": 0.3111, |
| "step": 18530 |
| }, |
| { |
| "epoch": 10.4, |
| "learning_rate": 2.5037176825816194e-05, |
| "loss": 0.3173, |
| "step": 18540 |
| }, |
| { |
| "epoch": 10.4, |
| "learning_rate": 2.503434610303831e-05, |
| "loss": 0.3229, |
| "step": 18550 |
| }, |
| { |
| "epoch": 10.41, |
| "learning_rate": 2.5031515380260426e-05, |
| "loss": 0.3073, |
| "step": 18560 |
| }, |
| { |
| "epoch": 10.41, |
| "learning_rate": 2.5028684657482545e-05, |
| "loss": 0.3152, |
| "step": 18570 |
| }, |
| { |
| "epoch": 10.42, |
| "learning_rate": 2.5025853934704664e-05, |
| "loss": 0.3053, |
| "step": 18580 |
| }, |
| { |
| "epoch": 10.43, |
| "learning_rate": 2.502302321192678e-05, |
| "loss": 0.3082, |
| "step": 18590 |
| }, |
| { |
| "epoch": 10.43, |
| "learning_rate": 2.5020192489148896e-05, |
| "loss": 0.3074, |
| "step": 18600 |
| }, |
| { |
| "epoch": 10.44, |
| "learning_rate": 2.501736176637101e-05, |
| "loss": 0.2984, |
| "step": 18610 |
| }, |
| { |
| "epoch": 10.44, |
| "learning_rate": 2.501481411587092e-05, |
| "loss": 0.3034, |
| "step": 18620 |
| }, |
| { |
| "epoch": 10.45, |
| "learning_rate": 2.5011983393093036e-05, |
| "loss": 0.3237, |
| "step": 18630 |
| }, |
| { |
| "epoch": 10.45, |
| "learning_rate": 2.5009152670315155e-05, |
| "loss": 0.314, |
| "step": 18640 |
| }, |
| { |
| "epoch": 10.46, |
| "learning_rate": 2.500632194753727e-05, |
| "loss": 0.3212, |
| "step": 18650 |
| }, |
| { |
| "epoch": 10.47, |
| "learning_rate": 2.500349122475939e-05, |
| "loss": 0.3118, |
| "step": 18660 |
| }, |
| { |
| "epoch": 10.47, |
| "learning_rate": 2.5000660501981506e-05, |
| "loss": 0.3244, |
| "step": 18670 |
| }, |
| { |
| "epoch": 10.48, |
| "learning_rate": 2.4997829779203625e-05, |
| "loss": 0.3239, |
| "step": 18680 |
| }, |
| { |
| "epoch": 10.48, |
| "learning_rate": 2.499499905642574e-05, |
| "loss": 0.3311, |
| "step": 18690 |
| }, |
| { |
| "epoch": 10.49, |
| "learning_rate": 2.499216833364786e-05, |
| "loss": 0.3049, |
| "step": 18700 |
| }, |
| { |
| "epoch": 10.49, |
| "learning_rate": 2.4989337610869976e-05, |
| "loss": 0.2952, |
| "step": 18710 |
| }, |
| { |
| "epoch": 10.5, |
| "learning_rate": 2.4986506888092092e-05, |
| "loss": 0.3067, |
| "step": 18720 |
| }, |
| { |
| "epoch": 10.5, |
| "learning_rate": 2.498367616531421e-05, |
| "loss": 0.3373, |
| "step": 18730 |
| }, |
| { |
| "epoch": 10.51, |
| "learning_rate": 2.498084544253633e-05, |
| "loss": 0.3089, |
| "step": 18740 |
| }, |
| { |
| "epoch": 10.52, |
| "learning_rate": 2.4978014719758446e-05, |
| "loss": 0.2969, |
| "step": 18750 |
| }, |
| { |
| "epoch": 10.52, |
| "learning_rate": 2.4975183996980562e-05, |
| "loss": 0.3083, |
| "step": 18760 |
| }, |
| { |
| "epoch": 10.53, |
| "learning_rate": 2.497235327420268e-05, |
| "loss": 0.3077, |
| "step": 18770 |
| }, |
| { |
| "epoch": 10.53, |
| "learning_rate": 2.49695225514248e-05, |
| "loss": 0.3193, |
| "step": 18780 |
| }, |
| { |
| "epoch": 10.54, |
| "learning_rate": 2.4966691828646916e-05, |
| "loss": 0.312, |
| "step": 18790 |
| }, |
| { |
| "epoch": 10.54, |
| "learning_rate": 2.4963861105869032e-05, |
| "loss": 0.3039, |
| "step": 18800 |
| }, |
| { |
| "epoch": 10.55, |
| "learning_rate": 2.4961030383091148e-05, |
| "loss": 0.3112, |
| "step": 18810 |
| }, |
| { |
| "epoch": 10.56, |
| "learning_rate": 2.495819966031327e-05, |
| "loss": 0.3061, |
| "step": 18820 |
| }, |
| { |
| "epoch": 10.56, |
| "learning_rate": 2.4955368937535386e-05, |
| "loss": 0.3089, |
| "step": 18830 |
| }, |
| { |
| "epoch": 10.57, |
| "learning_rate": 2.4952538214757502e-05, |
| "loss": 0.2987, |
| "step": 18840 |
| }, |
| { |
| "epoch": 10.57, |
| "learning_rate": 2.4949707491979618e-05, |
| "loss": 0.2965, |
| "step": 18850 |
| }, |
| { |
| "epoch": 10.58, |
| "learning_rate": 2.4946876769201737e-05, |
| "loss": 0.2961, |
| "step": 18860 |
| }, |
| { |
| "epoch": 10.58, |
| "learning_rate": 2.4944046046423856e-05, |
| "loss": 0.3191, |
| "step": 18870 |
| }, |
| { |
| "epoch": 10.59, |
| "learning_rate": 2.4941215323645972e-05, |
| "loss": 0.322, |
| "step": 18880 |
| }, |
| { |
| "epoch": 10.59, |
| "learning_rate": 2.4938384600868088e-05, |
| "loss": 0.2978, |
| "step": 18890 |
| }, |
| { |
| "epoch": 10.6, |
| "learning_rate": 2.4935553878090203e-05, |
| "loss": 0.2969, |
| "step": 18900 |
| }, |
| { |
| "epoch": 10.61, |
| "learning_rate": 2.4932723155312326e-05, |
| "loss": 0.2937, |
| "step": 18910 |
| }, |
| { |
| "epoch": 10.61, |
| "learning_rate": 2.4929892432534442e-05, |
| "loss": 0.3088, |
| "step": 18920 |
| }, |
| { |
| "epoch": 10.62, |
| "learning_rate": 2.4927061709756558e-05, |
| "loss": 0.3283, |
| "step": 18930 |
| }, |
| { |
| "epoch": 10.62, |
| "learning_rate": 2.4924230986978673e-05, |
| "loss": 0.3053, |
| "step": 18940 |
| }, |
| { |
| "epoch": 10.63, |
| "learning_rate": 2.4921400264200796e-05, |
| "loss": 0.3089, |
| "step": 18950 |
| }, |
| { |
| "epoch": 10.63, |
| "learning_rate": 2.4918569541422912e-05, |
| "loss": 0.2906, |
| "step": 18960 |
| }, |
| { |
| "epoch": 10.64, |
| "learning_rate": 2.4915738818645028e-05, |
| "loss": 0.2993, |
| "step": 18970 |
| }, |
| { |
| "epoch": 10.64, |
| "learning_rate": 2.4912908095867143e-05, |
| "loss": 0.3337, |
| "step": 18980 |
| }, |
| { |
| "epoch": 10.65, |
| "learning_rate": 2.4910077373089263e-05, |
| "loss": 0.3025, |
| "step": 18990 |
| }, |
| { |
| "epoch": 10.66, |
| "learning_rate": 2.4907246650311382e-05, |
| "loss": 0.3021, |
| "step": 19000 |
| }, |
| { |
| "epoch": 10.66, |
| "learning_rate": 2.4904415927533498e-05, |
| "loss": 0.3028, |
| "step": 19010 |
| }, |
| { |
| "epoch": 10.67, |
| "learning_rate": 2.4901585204755613e-05, |
| "loss": 0.2944, |
| "step": 19020 |
| }, |
| { |
| "epoch": 10.67, |
| "learning_rate": 2.4898754481977733e-05, |
| "loss": 0.3287, |
| "step": 19030 |
| }, |
| { |
| "epoch": 10.68, |
| "learning_rate": 2.4895923759199852e-05, |
| "loss": 0.3004, |
| "step": 19040 |
| }, |
| { |
| "epoch": 10.68, |
| "learning_rate": 2.4893093036421968e-05, |
| "loss": 0.3023, |
| "step": 19050 |
| }, |
| { |
| "epoch": 10.69, |
| "learning_rate": 2.4890262313644083e-05, |
| "loss": 0.3016, |
| "step": 19060 |
| }, |
| { |
| "epoch": 10.7, |
| "learning_rate": 2.4887431590866203e-05, |
| "loss": 0.328, |
| "step": 19070 |
| }, |
| { |
| "epoch": 10.7, |
| "learning_rate": 2.488460086808832e-05, |
| "loss": 0.3141, |
| "step": 19080 |
| }, |
| { |
| "epoch": 10.71, |
| "learning_rate": 2.4881770145310438e-05, |
| "loss": 0.3142, |
| "step": 19090 |
| }, |
| { |
| "epoch": 10.71, |
| "learning_rate": 2.4878939422532553e-05, |
| "loss": 0.3151, |
| "step": 19100 |
| }, |
| { |
| "epoch": 10.72, |
| "learning_rate": 2.4876108699754673e-05, |
| "loss": 0.3031, |
| "step": 19110 |
| }, |
| { |
| "epoch": 10.72, |
| "learning_rate": 2.487327797697679e-05, |
| "loss": 0.3044, |
| "step": 19120 |
| }, |
| { |
| "epoch": 10.73, |
| "learning_rate": 2.4870447254198908e-05, |
| "loss": 0.3438, |
| "step": 19130 |
| }, |
| { |
| "epoch": 10.73, |
| "learning_rate": 2.4867616531421023e-05, |
| "loss": 0.3143, |
| "step": 19140 |
| }, |
| { |
| "epoch": 10.74, |
| "learning_rate": 2.4864785808643142e-05, |
| "loss": 0.2974, |
| "step": 19150 |
| }, |
| { |
| "epoch": 10.75, |
| "learning_rate": 2.4861955085865258e-05, |
| "loss": 0.2995, |
| "step": 19160 |
| }, |
| { |
| "epoch": 10.75, |
| "learning_rate": 2.4859124363087374e-05, |
| "loss": 0.3084, |
| "step": 19170 |
| }, |
| { |
| "epoch": 10.76, |
| "learning_rate": 2.4856293640309493e-05, |
| "loss": 0.325, |
| "step": 19180 |
| }, |
| { |
| "epoch": 10.76, |
| "learning_rate": 2.4853462917531612e-05, |
| "loss": 0.2987, |
| "step": 19190 |
| }, |
| { |
| "epoch": 10.77, |
| "learning_rate": 2.4850632194753728e-05, |
| "loss": 0.297, |
| "step": 19200 |
| }, |
| { |
| "epoch": 10.77, |
| "learning_rate": 2.4847801471975844e-05, |
| "loss": 0.3075, |
| "step": 19210 |
| }, |
| { |
| "epoch": 10.78, |
| "learning_rate": 2.4844970749197963e-05, |
| "loss": 0.3034, |
| "step": 19220 |
| }, |
| { |
| "epoch": 10.78, |
| "learning_rate": 2.4842140026420082e-05, |
| "loss": 0.347, |
| "step": 19230 |
| }, |
| { |
| "epoch": 10.79, |
| "learning_rate": 2.4839309303642198e-05, |
| "loss": 0.3011, |
| "step": 19240 |
| }, |
| { |
| "epoch": 10.8, |
| "learning_rate": 2.4836478580864314e-05, |
| "loss": 0.301, |
| "step": 19250 |
| }, |
| { |
| "epoch": 10.8, |
| "learning_rate": 2.483364785808643e-05, |
| "loss": 0.2986, |
| "step": 19260 |
| }, |
| { |
| "epoch": 10.81, |
| "learning_rate": 2.483081713530855e-05, |
| "loss": 0.3181, |
| "step": 19270 |
| }, |
| { |
| "epoch": 10.81, |
| "learning_rate": 2.4827986412530668e-05, |
| "loss": 0.3127, |
| "step": 19280 |
| }, |
| { |
| "epoch": 10.82, |
| "learning_rate": 2.4825155689752784e-05, |
| "loss": 0.3164, |
| "step": 19290 |
| }, |
| { |
| "epoch": 10.82, |
| "learning_rate": 2.48223249669749e-05, |
| "loss": 0.2879, |
| "step": 19300 |
| }, |
| { |
| "epoch": 10.83, |
| "learning_rate": 2.481949424419702e-05, |
| "loss": 0.3101, |
| "step": 19310 |
| }, |
| { |
| "epoch": 10.84, |
| "learning_rate": 2.4816663521419138e-05, |
| "loss": 0.3036, |
| "step": 19320 |
| }, |
| { |
| "epoch": 10.84, |
| "learning_rate": 2.4813832798641254e-05, |
| "loss": 0.3098, |
| "step": 19330 |
| }, |
| { |
| "epoch": 10.85, |
| "learning_rate": 2.481100207586337e-05, |
| "loss": 0.3098, |
| "step": 19340 |
| }, |
| { |
| "epoch": 10.85, |
| "learning_rate": 2.4808171353085486e-05, |
| "loss": 0.3177, |
| "step": 19350 |
| }, |
| { |
| "epoch": 10.86, |
| "learning_rate": 2.4805340630307608e-05, |
| "loss": 0.3013, |
| "step": 19360 |
| }, |
| { |
| "epoch": 10.86, |
| "learning_rate": 2.4802509907529724e-05, |
| "loss": 0.3015, |
| "step": 19370 |
| }, |
| { |
| "epoch": 10.87, |
| "learning_rate": 2.479967918475184e-05, |
| "loss": 0.3131, |
| "step": 19380 |
| }, |
| { |
| "epoch": 10.87, |
| "learning_rate": 2.4796848461973956e-05, |
| "loss": 0.3064, |
| "step": 19390 |
| }, |
| { |
| "epoch": 10.88, |
| "learning_rate": 2.4794017739196078e-05, |
| "loss": 0.3097, |
| "step": 19400 |
| }, |
| { |
| "epoch": 10.89, |
| "learning_rate": 2.4791187016418194e-05, |
| "loss": 0.3083, |
| "step": 19410 |
| }, |
| { |
| "epoch": 10.89, |
| "learning_rate": 2.478835629364031e-05, |
| "loss": 0.3205, |
| "step": 19420 |
| }, |
| { |
| "epoch": 10.9, |
| "learning_rate": 2.4785525570862426e-05, |
| "loss": 0.3326, |
| "step": 19430 |
| }, |
| { |
| "epoch": 10.9, |
| "learning_rate": 2.4782694848084545e-05, |
| "loss": 0.2968, |
| "step": 19440 |
| }, |
| { |
| "epoch": 10.91, |
| "learning_rate": 2.4779864125306664e-05, |
| "loss": 0.3011, |
| "step": 19450 |
| }, |
| { |
| "epoch": 10.91, |
| "learning_rate": 2.477703340252878e-05, |
| "loss": 0.3116, |
| "step": 19460 |
| }, |
| { |
| "epoch": 10.92, |
| "learning_rate": 2.4774202679750896e-05, |
| "loss": 0.2952, |
| "step": 19470 |
| }, |
| { |
| "epoch": 10.93, |
| "learning_rate": 2.4771371956973015e-05, |
| "loss": 0.328, |
| "step": 19480 |
| }, |
| { |
| "epoch": 10.93, |
| "learning_rate": 2.4768541234195134e-05, |
| "loss": 0.3127, |
| "step": 19490 |
| }, |
| { |
| "epoch": 10.94, |
| "learning_rate": 2.476571051141725e-05, |
| "loss": 0.2972, |
| "step": 19500 |
| }, |
| { |
| "epoch": 10.94, |
| "learning_rate": 2.4762879788639366e-05, |
| "loss": 0.3033, |
| "step": 19510 |
| }, |
| { |
| "epoch": 10.95, |
| "learning_rate": 2.4760049065861485e-05, |
| "loss": 0.3051, |
| "step": 19520 |
| }, |
| { |
| "epoch": 10.95, |
| "learning_rate": 2.47572183430836e-05, |
| "loss": 0.3213, |
| "step": 19530 |
| }, |
| { |
| "epoch": 10.96, |
| "learning_rate": 2.475438762030572e-05, |
| "loss": 0.3, |
| "step": 19540 |
| }, |
| { |
| "epoch": 10.96, |
| "learning_rate": 2.4751556897527835e-05, |
| "loss": 0.3046, |
| "step": 19550 |
| }, |
| { |
| "epoch": 10.97, |
| "learning_rate": 2.4748726174749955e-05, |
| "loss": 0.2931, |
| "step": 19560 |
| }, |
| { |
| "epoch": 10.98, |
| "learning_rate": 2.474589545197207e-05, |
| "loss": 0.311, |
| "step": 19570 |
| }, |
| { |
| "epoch": 10.98, |
| "learning_rate": 2.474306472919419e-05, |
| "loss": 0.3457, |
| "step": 19580 |
| }, |
| { |
| "epoch": 10.99, |
| "learning_rate": 2.4740234006416305e-05, |
| "loss": 0.3062, |
| "step": 19590 |
| }, |
| { |
| "epoch": 10.99, |
| "learning_rate": 2.4737403283638425e-05, |
| "loss": 0.3021, |
| "step": 19600 |
| }, |
| { |
| "epoch": 11.0, |
| "learning_rate": 2.473457256086054e-05, |
| "loss": 0.3192, |
| "step": 19610 |
| }, |
| { |
| "epoch": 11.0, |
| "eval_cer": 0.17184457368586167, |
| "eval_loss": 0.42339661717414856, |
| "eval_runtime": 316.9005, |
| "eval_samples_per_second": 16.797, |
| "eval_steps_per_second": 4.2, |
| "eval_wer": 0.23168073700345207, |
| "step": 19613 |
| }, |
| { |
| "epoch": 11.0, |
| "learning_rate": 2.4731741838082656e-05, |
| "loss": 0.3426, |
| "step": 19620 |
| }, |
| { |
| "epoch": 11.01, |
| "learning_rate": 2.4728911115304775e-05, |
| "loss": 0.287, |
| "step": 19630 |
| }, |
| { |
| "epoch": 11.02, |
| "learning_rate": 2.4726080392526895e-05, |
| "loss": 0.2762, |
| "step": 19640 |
| }, |
| { |
| "epoch": 11.02, |
| "learning_rate": 2.472324966974901e-05, |
| "loss": 0.292, |
| "step": 19650 |
| }, |
| { |
| "epoch": 11.03, |
| "learning_rate": 2.4720418946971126e-05, |
| "loss": 0.2888, |
| "step": 19660 |
| }, |
| { |
| "epoch": 11.03, |
| "learning_rate": 2.4717588224193245e-05, |
| "loss": 0.3168, |
| "step": 19670 |
| }, |
| { |
| "epoch": 11.04, |
| "learning_rate": 2.471475750141536e-05, |
| "loss": 0.3002, |
| "step": 19680 |
| }, |
| { |
| "epoch": 11.04, |
| "learning_rate": 2.471192677863748e-05, |
| "loss": 0.2813, |
| "step": 19690 |
| }, |
| { |
| "epoch": 11.05, |
| "learning_rate": 2.4709096055859596e-05, |
| "loss": 0.3019, |
| "step": 19700 |
| }, |
| { |
| "epoch": 11.05, |
| "learning_rate": 2.4706265333081712e-05, |
| "loss": 0.3071, |
| "step": 19710 |
| }, |
| { |
| "epoch": 11.06, |
| "learning_rate": 2.470343461030383e-05, |
| "loss": 0.2949, |
| "step": 19720 |
| }, |
| { |
| "epoch": 11.07, |
| "learning_rate": 2.470060388752595e-05, |
| "loss": 0.3034, |
| "step": 19730 |
| }, |
| { |
| "epoch": 11.07, |
| "learning_rate": 2.4697773164748066e-05, |
| "loss": 0.2933, |
| "step": 19740 |
| }, |
| { |
| "epoch": 11.08, |
| "learning_rate": 2.4694942441970182e-05, |
| "loss": 0.2871, |
| "step": 19750 |
| }, |
| { |
| "epoch": 11.08, |
| "learning_rate": 2.46921117191923e-05, |
| "loss": 0.2991, |
| "step": 19760 |
| }, |
| { |
| "epoch": 11.09, |
| "learning_rate": 2.468928099641442e-05, |
| "loss": 0.2983, |
| "step": 19770 |
| }, |
| { |
| "epoch": 11.09, |
| "learning_rate": 2.4686450273636536e-05, |
| "loss": 0.3057, |
| "step": 19780 |
| }, |
| { |
| "epoch": 11.1, |
| "learning_rate": 2.4683619550858652e-05, |
| "loss": 0.2985, |
| "step": 19790 |
| }, |
| { |
| "epoch": 11.1, |
| "learning_rate": 2.4680788828080768e-05, |
| "loss": 0.3036, |
| "step": 19800 |
| }, |
| { |
| "epoch": 11.11, |
| "learning_rate": 2.467795810530289e-05, |
| "loss": 0.2998, |
| "step": 19810 |
| }, |
| { |
| "epoch": 11.12, |
| "learning_rate": 2.4675127382525006e-05, |
| "loss": 0.301, |
| "step": 19820 |
| }, |
| { |
| "epoch": 11.12, |
| "learning_rate": 2.4672296659747122e-05, |
| "loss": 0.3089, |
| "step": 19830 |
| }, |
| { |
| "epoch": 11.13, |
| "learning_rate": 2.4669465936969238e-05, |
| "loss": 0.2873, |
| "step": 19840 |
| }, |
| { |
| "epoch": 11.13, |
| "learning_rate": 2.466663521419136e-05, |
| "loss": 0.3005, |
| "step": 19850 |
| }, |
| { |
| "epoch": 11.14, |
| "learning_rate": 2.4663804491413476e-05, |
| "loss": 0.3073, |
| "step": 19860 |
| }, |
| { |
| "epoch": 11.14, |
| "learning_rate": 2.4660973768635592e-05, |
| "loss": 0.3017, |
| "step": 19870 |
| }, |
| { |
| "epoch": 11.15, |
| "learning_rate": 2.4658143045857708e-05, |
| "loss": 0.2941, |
| "step": 19880 |
| }, |
| { |
| "epoch": 11.16, |
| "learning_rate": 2.4655312323079827e-05, |
| "loss": 0.2877, |
| "step": 19890 |
| }, |
| { |
| "epoch": 11.16, |
| "learning_rate": 2.4652481600301946e-05, |
| "loss": 0.2953, |
| "step": 19900 |
| }, |
| { |
| "epoch": 11.17, |
| "learning_rate": 2.4649650877524062e-05, |
| "loss": 0.3058, |
| "step": 19910 |
| }, |
| { |
| "epoch": 11.17, |
| "learning_rate": 2.4646820154746178e-05, |
| "loss": 0.3029, |
| "step": 19920 |
| }, |
| { |
| "epoch": 11.18, |
| "learning_rate": 2.4643989431968297e-05, |
| "loss": 0.2971, |
| "step": 19930 |
| }, |
| { |
| "epoch": 11.18, |
| "learning_rate": 2.4641158709190416e-05, |
| "loss": 0.2885, |
| "step": 19940 |
| }, |
| { |
| "epoch": 11.19, |
| "learning_rate": 2.4638327986412532e-05, |
| "loss": 0.2827, |
| "step": 19950 |
| }, |
| { |
| "epoch": 11.19, |
| "learning_rate": 2.4635497263634648e-05, |
| "loss": 0.2947, |
| "step": 19960 |
| }, |
| { |
| "epoch": 11.2, |
| "learning_rate": 2.4632666540856767e-05, |
| "loss": 0.3021, |
| "step": 19970 |
| }, |
| { |
| "epoch": 11.21, |
| "learning_rate": 2.4629835818078883e-05, |
| "loss": 0.2951, |
| "step": 19980 |
| }, |
| { |
| "epoch": 11.21, |
| "learning_rate": 2.4627005095301002e-05, |
| "loss": 0.2889, |
| "step": 19990 |
| }, |
| { |
| "epoch": 11.22, |
| "learning_rate": 2.4624174372523118e-05, |
| "loss": 0.2853, |
| "step": 20000 |
| }, |
| { |
| "epoch": 11.22, |
| "learning_rate": 2.4621343649745237e-05, |
| "loss": 0.2978, |
| "step": 20010 |
| }, |
| { |
| "epoch": 11.23, |
| "learning_rate": 2.4618512926967353e-05, |
| "loss": 0.2907, |
| "step": 20020 |
| }, |
| { |
| "epoch": 11.23, |
| "learning_rate": 2.4615682204189472e-05, |
| "loss": 0.2983, |
| "step": 20030 |
| }, |
| { |
| "epoch": 11.24, |
| "learning_rate": 2.4612851481411588e-05, |
| "loss": 0.2884, |
| "step": 20040 |
| }, |
| { |
| "epoch": 11.24, |
| "learning_rate": 2.4610020758633707e-05, |
| "loss": 0.3063, |
| "step": 20050 |
| }, |
| { |
| "epoch": 11.25, |
| "learning_rate": 2.4607190035855823e-05, |
| "loss": 0.3043, |
| "step": 20060 |
| }, |
| { |
| "epoch": 11.26, |
| "learning_rate": 2.460435931307794e-05, |
| "loss": 0.3008, |
| "step": 20070 |
| }, |
| { |
| "epoch": 11.26, |
| "learning_rate": 2.4601528590300058e-05, |
| "loss": 0.2997, |
| "step": 20080 |
| }, |
| { |
| "epoch": 11.27, |
| "learning_rate": 2.4598697867522173e-05, |
| "loss": 0.299, |
| "step": 20090 |
| }, |
| { |
| "epoch": 11.27, |
| "learning_rate": 2.4595867144744293e-05, |
| "loss": 0.2921, |
| "step": 20100 |
| }, |
| { |
| "epoch": 11.28, |
| "learning_rate": 2.459303642196641e-05, |
| "loss": 0.2902, |
| "step": 20110 |
| }, |
| { |
| "epoch": 11.28, |
| "learning_rate": 2.4590205699188528e-05, |
| "loss": 0.2944, |
| "step": 20120 |
| }, |
| { |
| "epoch": 11.29, |
| "learning_rate": 2.4587374976410643e-05, |
| "loss": 0.3012, |
| "step": 20130 |
| }, |
| { |
| "epoch": 11.3, |
| "learning_rate": 2.4584544253632763e-05, |
| "loss": 0.2933, |
| "step": 20140 |
| }, |
| { |
| "epoch": 11.3, |
| "learning_rate": 2.458171353085488e-05, |
| "loss": 0.2896, |
| "step": 20150 |
| }, |
| { |
| "epoch": 11.31, |
| "learning_rate": 2.4578882808076994e-05, |
| "loss": 0.3159, |
| "step": 20160 |
| }, |
| { |
| "epoch": 11.31, |
| "learning_rate": 2.4576052085299113e-05, |
| "loss": 0.308, |
| "step": 20170 |
| }, |
| { |
| "epoch": 11.32, |
| "learning_rate": 2.4573221362521233e-05, |
| "loss": 0.298, |
| "step": 20180 |
| }, |
| { |
| "epoch": 11.32, |
| "learning_rate": 2.457039063974335e-05, |
| "loss": 0.2991, |
| "step": 20190 |
| }, |
| { |
| "epoch": 11.33, |
| "learning_rate": 2.4567559916965464e-05, |
| "loss": 0.2972, |
| "step": 20200 |
| }, |
| { |
| "epoch": 11.33, |
| "learning_rate": 2.4564729194187583e-05, |
| "loss": 0.3173, |
| "step": 20210 |
| }, |
| { |
| "epoch": 11.34, |
| "learning_rate": 2.4561898471409703e-05, |
| "loss": 0.2955, |
| "step": 20220 |
| }, |
| { |
| "epoch": 11.35, |
| "learning_rate": 2.455906774863182e-05, |
| "loss": 0.2753, |
| "step": 20230 |
| }, |
| { |
| "epoch": 11.35, |
| "learning_rate": 2.4556237025853934e-05, |
| "loss": 0.2883, |
| "step": 20240 |
| }, |
| { |
| "epoch": 11.36, |
| "learning_rate": 2.455340630307605e-05, |
| "loss": 0.283, |
| "step": 20250 |
| }, |
| { |
| "epoch": 11.36, |
| "learning_rate": 2.4550575580298173e-05, |
| "loss": 0.2968, |
| "step": 20260 |
| }, |
| { |
| "epoch": 11.37, |
| "learning_rate": 2.454774485752029e-05, |
| "loss": 0.2981, |
| "step": 20270 |
| }, |
| { |
| "epoch": 11.37, |
| "learning_rate": 2.4544914134742404e-05, |
| "loss": 0.3038, |
| "step": 20280 |
| }, |
| { |
| "epoch": 11.38, |
| "learning_rate": 2.454208341196452e-05, |
| "loss": 0.2989, |
| "step": 20290 |
| }, |
| { |
| "epoch": 11.39, |
| "learning_rate": 2.4539252689186642e-05, |
| "loss": 0.2823, |
| "step": 20300 |
| }, |
| { |
| "epoch": 11.39, |
| "learning_rate": 2.4536421966408758e-05, |
| "loss": 0.315, |
| "step": 20310 |
| }, |
| { |
| "epoch": 11.4, |
| "learning_rate": 2.4533591243630874e-05, |
| "loss": 0.3301, |
| "step": 20320 |
| }, |
| { |
| "epoch": 11.4, |
| "learning_rate": 2.453076052085299e-05, |
| "loss": 0.2871, |
| "step": 20330 |
| }, |
| { |
| "epoch": 11.41, |
| "learning_rate": 2.452792979807511e-05, |
| "loss": 0.289, |
| "step": 20340 |
| }, |
| { |
| "epoch": 11.41, |
| "learning_rate": 2.4525099075297228e-05, |
| "loss": 0.2848, |
| "step": 20350 |
| }, |
| { |
| "epoch": 11.42, |
| "learning_rate": 2.4522268352519344e-05, |
| "loss": 0.3167, |
| "step": 20360 |
| }, |
| { |
| "epoch": 11.42, |
| "learning_rate": 2.451943762974146e-05, |
| "loss": 0.305, |
| "step": 20370 |
| }, |
| { |
| "epoch": 11.43, |
| "learning_rate": 2.451660690696358e-05, |
| "loss": 0.2856, |
| "step": 20380 |
| }, |
| { |
| "epoch": 11.44, |
| "learning_rate": 2.4513776184185698e-05, |
| "loss": 0.2924, |
| "step": 20390 |
| }, |
| { |
| "epoch": 11.44, |
| "learning_rate": 2.4510945461407814e-05, |
| "loss": 0.2985, |
| "step": 20400 |
| }, |
| { |
| "epoch": 11.45, |
| "learning_rate": 2.450811473862993e-05, |
| "loss": 0.3143, |
| "step": 20410 |
| }, |
| { |
| "epoch": 11.45, |
| "learning_rate": 2.450528401585205e-05, |
| "loss": 0.3011, |
| "step": 20420 |
| }, |
| { |
| "epoch": 11.46, |
| "learning_rate": 2.4502453293074165e-05, |
| "loss": 0.291, |
| "step": 20430 |
| }, |
| { |
| "epoch": 11.46, |
| "learning_rate": 2.4499622570296284e-05, |
| "loss": 0.2906, |
| "step": 20440 |
| }, |
| { |
| "epoch": 11.47, |
| "learning_rate": 2.44967918475184e-05, |
| "loss": 0.2998, |
| "step": 20450 |
| }, |
| { |
| "epoch": 11.47, |
| "learning_rate": 2.449396112474052e-05, |
| "loss": 0.3122, |
| "step": 20460 |
| }, |
| { |
| "epoch": 11.48, |
| "learning_rate": 2.4491130401962635e-05, |
| "loss": 0.3183, |
| "step": 20470 |
| }, |
| { |
| "epoch": 11.49, |
| "learning_rate": 2.4488299679184754e-05, |
| "loss": 0.2844, |
| "step": 20480 |
| }, |
| { |
| "epoch": 11.49, |
| "learning_rate": 2.448546895640687e-05, |
| "loss": 0.2899, |
| "step": 20490 |
| }, |
| { |
| "epoch": 11.5, |
| "learning_rate": 2.4482638233628986e-05, |
| "loss": 0.2947, |
| "step": 20500 |
| }, |
| { |
| "epoch": 11.5, |
| "learning_rate": 2.4479807510851105e-05, |
| "loss": 0.2911, |
| "step": 20510 |
| }, |
| { |
| "epoch": 11.51, |
| "learning_rate": 2.447697678807322e-05, |
| "loss": 0.3078, |
| "step": 20520 |
| }, |
| { |
| "epoch": 11.51, |
| "learning_rate": 2.447414606529534e-05, |
| "loss": 0.2807, |
| "step": 20530 |
| }, |
| { |
| "epoch": 11.52, |
| "learning_rate": 2.4471315342517456e-05, |
| "loss": 0.2924, |
| "step": 20540 |
| }, |
| { |
| "epoch": 11.53, |
| "learning_rate": 2.4468484619739575e-05, |
| "loss": 0.3033, |
| "step": 20550 |
| }, |
| { |
| "epoch": 11.53, |
| "learning_rate": 2.446565389696169e-05, |
| "loss": 0.2971, |
| "step": 20560 |
| }, |
| { |
| "epoch": 11.54, |
| "learning_rate": 2.446282317418381e-05, |
| "loss": 0.3208, |
| "step": 20570 |
| }, |
| { |
| "epoch": 11.54, |
| "learning_rate": 2.4459992451405926e-05, |
| "loss": 0.2977, |
| "step": 20580 |
| }, |
| { |
| "epoch": 11.55, |
| "learning_rate": 2.4457161728628045e-05, |
| "loss": 0.3103, |
| "step": 20590 |
| }, |
| { |
| "epoch": 11.55, |
| "learning_rate": 2.445433100585016e-05, |
| "loss": 0.2858, |
| "step": 20600 |
| }, |
| { |
| "epoch": 11.56, |
| "learning_rate": 2.4451500283072276e-05, |
| "loss": 0.308, |
| "step": 20610 |
| }, |
| { |
| "epoch": 11.56, |
| "learning_rate": 2.4448669560294396e-05, |
| "loss": 0.3088, |
| "step": 20620 |
| }, |
| { |
| "epoch": 11.57, |
| "learning_rate": 2.4445838837516515e-05, |
| "loss": 0.2932, |
| "step": 20630 |
| }, |
| { |
| "epoch": 11.58, |
| "learning_rate": 2.444300811473863e-05, |
| "loss": 0.2928, |
| "step": 20640 |
| }, |
| { |
| "epoch": 11.58, |
| "learning_rate": 2.4440177391960746e-05, |
| "loss": 0.2996, |
| "step": 20650 |
| }, |
| { |
| "epoch": 11.59, |
| "learning_rate": 2.4437346669182866e-05, |
| "loss": 0.3054, |
| "step": 20660 |
| }, |
| { |
| "epoch": 11.59, |
| "learning_rate": 2.4434515946404985e-05, |
| "loss": 0.3093, |
| "step": 20670 |
| }, |
| { |
| "epoch": 11.6, |
| "learning_rate": 2.44316852236271e-05, |
| "loss": 0.3174, |
| "step": 20680 |
| }, |
| { |
| "epoch": 11.6, |
| "learning_rate": 2.4428854500849216e-05, |
| "loss": 0.2801, |
| "step": 20690 |
| }, |
| { |
| "epoch": 11.61, |
| "learning_rate": 2.4426023778071332e-05, |
| "loss": 0.2981, |
| "step": 20700 |
| }, |
| { |
| "epoch": 11.61, |
| "learning_rate": 2.4423193055293455e-05, |
| "loss": 0.2989, |
| "step": 20710 |
| }, |
| { |
| "epoch": 11.62, |
| "learning_rate": 2.442036233251557e-05, |
| "loss": 0.2927, |
| "step": 20720 |
| }, |
| { |
| "epoch": 11.63, |
| "learning_rate": 2.4417531609737686e-05, |
| "loss": 0.2857, |
| "step": 20730 |
| }, |
| { |
| "epoch": 11.63, |
| "learning_rate": 2.4414700886959802e-05, |
| "loss": 0.2895, |
| "step": 20740 |
| }, |
| { |
| "epoch": 11.64, |
| "learning_rate": 2.4411870164181925e-05, |
| "loss": 0.2964, |
| "step": 20750 |
| }, |
| { |
| "epoch": 11.64, |
| "learning_rate": 2.440903944140404e-05, |
| "loss": 0.2933, |
| "step": 20760 |
| }, |
| { |
| "epoch": 11.65, |
| "learning_rate": 2.4406208718626156e-05, |
| "loss": 0.3091, |
| "step": 20770 |
| }, |
| { |
| "epoch": 11.65, |
| "learning_rate": 2.4403377995848272e-05, |
| "loss": 0.295, |
| "step": 20780 |
| }, |
| { |
| "epoch": 11.66, |
| "learning_rate": 2.440054727307039e-05, |
| "loss": 0.2877, |
| "step": 20790 |
| }, |
| { |
| "epoch": 11.67, |
| "learning_rate": 2.439771655029251e-05, |
| "loss": 0.2979, |
| "step": 20800 |
| }, |
| { |
| "epoch": 11.67, |
| "learning_rate": 2.4394885827514626e-05, |
| "loss": 0.299, |
| "step": 20810 |
| }, |
| { |
| "epoch": 11.68, |
| "learning_rate": 2.4392055104736742e-05, |
| "loss": 0.2799, |
| "step": 20820 |
| }, |
| { |
| "epoch": 11.68, |
| "learning_rate": 2.438922438195886e-05, |
| "loss": 0.2999, |
| "step": 20830 |
| }, |
| { |
| "epoch": 11.69, |
| "learning_rate": 2.438639365918098e-05, |
| "loss": 0.2939, |
| "step": 20840 |
| }, |
| { |
| "epoch": 11.69, |
| "learning_rate": 2.4383562936403096e-05, |
| "loss": 0.2957, |
| "step": 20850 |
| }, |
| { |
| "epoch": 11.7, |
| "learning_rate": 2.4380732213625212e-05, |
| "loss": 0.3, |
| "step": 20860 |
| }, |
| { |
| "epoch": 11.7, |
| "learning_rate": 2.437790149084733e-05, |
| "loss": 0.2894, |
| "step": 20870 |
| }, |
| { |
| "epoch": 11.71, |
| "learning_rate": 2.4375070768069447e-05, |
| "loss": 0.2879, |
| "step": 20880 |
| }, |
| { |
| "epoch": 11.72, |
| "learning_rate": 2.4372240045291566e-05, |
| "loss": 0.2784, |
| "step": 20890 |
| }, |
| { |
| "epoch": 11.72, |
| "learning_rate": 2.4369409322513682e-05, |
| "loss": 0.2744, |
| "step": 20900 |
| }, |
| { |
| "epoch": 11.73, |
| "learning_rate": 2.43665785997358e-05, |
| "loss": 0.3072, |
| "step": 20910 |
| }, |
| { |
| "epoch": 11.73, |
| "learning_rate": 2.4363747876957917e-05, |
| "loss": 0.2914, |
| "step": 20920 |
| }, |
| { |
| "epoch": 11.74, |
| "learning_rate": 2.4360917154180036e-05, |
| "loss": 0.3039, |
| "step": 20930 |
| }, |
| { |
| "epoch": 11.74, |
| "learning_rate": 2.4358086431402152e-05, |
| "loss": 0.3072, |
| "step": 20940 |
| }, |
| { |
| "epoch": 11.75, |
| "learning_rate": 2.4355255708624268e-05, |
| "loss": 0.2898, |
| "step": 20950 |
| }, |
| { |
| "epoch": 11.76, |
| "learning_rate": 2.4352424985846387e-05, |
| "loss": 0.2802, |
| "step": 20960 |
| }, |
| { |
| "epoch": 11.76, |
| "learning_rate": 2.4349594263068503e-05, |
| "loss": 0.2958, |
| "step": 20970 |
| }, |
| { |
| "epoch": 11.77, |
| "learning_rate": 2.4346763540290622e-05, |
| "loss": 0.3033, |
| "step": 20980 |
| }, |
| { |
| "epoch": 11.77, |
| "learning_rate": 2.4343932817512738e-05, |
| "loss": 0.2924, |
| "step": 20990 |
| }, |
| { |
| "epoch": 11.78, |
| "learning_rate": 2.4341102094734857e-05, |
| "loss": 0.2845, |
| "step": 21000 |
| }, |
| { |
| "epoch": 11.78, |
| "learning_rate": 2.4338271371956973e-05, |
| "loss": 0.3198, |
| "step": 21010 |
| }, |
| { |
| "epoch": 11.79, |
| "learning_rate": 2.4335440649179092e-05, |
| "loss": 0.304, |
| "step": 21020 |
| }, |
| { |
| "epoch": 11.79, |
| "learning_rate": 2.4332609926401208e-05, |
| "loss": 0.3068, |
| "step": 21030 |
| }, |
| { |
| "epoch": 11.8, |
| "learning_rate": 2.4329779203623327e-05, |
| "loss": 0.2895, |
| "step": 21040 |
| }, |
| { |
| "epoch": 11.81, |
| "learning_rate": 2.4326948480845443e-05, |
| "loss": 0.2892, |
| "step": 21050 |
| }, |
| { |
| "epoch": 11.81, |
| "learning_rate": 2.432411775806756e-05, |
| "loss": 0.3046, |
| "step": 21060 |
| }, |
| { |
| "epoch": 11.82, |
| "learning_rate": 2.4321287035289678e-05, |
| "loss": 0.3121, |
| "step": 21070 |
| }, |
| { |
| "epoch": 11.82, |
| "learning_rate": 2.4318456312511797e-05, |
| "loss": 0.2961, |
| "step": 21080 |
| }, |
| { |
| "epoch": 11.83, |
| "learning_rate": 2.4315625589733913e-05, |
| "loss": 0.2921, |
| "step": 21090 |
| }, |
| { |
| "epoch": 11.83, |
| "learning_rate": 2.431279486695603e-05, |
| "loss": 0.3024, |
| "step": 21100 |
| }, |
| { |
| "epoch": 11.84, |
| "learning_rate": 2.4309964144178148e-05, |
| "loss": 0.3083, |
| "step": 21110 |
| }, |
| { |
| "epoch": 11.84, |
| "learning_rate": 2.4307133421400267e-05, |
| "loss": 0.2991, |
| "step": 21120 |
| }, |
| { |
| "epoch": 11.85, |
| "learning_rate": 2.4304302698622383e-05, |
| "loss": 0.2975, |
| "step": 21130 |
| }, |
| { |
| "epoch": 11.86, |
| "learning_rate": 2.43014719758445e-05, |
| "loss": 0.2868, |
| "step": 21140 |
| }, |
| { |
| "epoch": 11.86, |
| "learning_rate": 2.4298641253066614e-05, |
| "loss": 0.3061, |
| "step": 21150 |
| }, |
| { |
| "epoch": 11.87, |
| "learning_rate": 2.4295810530288737e-05, |
| "loss": 0.3133, |
| "step": 21160 |
| }, |
| { |
| "epoch": 11.87, |
| "learning_rate": 2.4292979807510853e-05, |
| "loss": 0.3046, |
| "step": 21170 |
| }, |
| { |
| "epoch": 11.88, |
| "learning_rate": 2.429014908473297e-05, |
| "loss": 0.2951, |
| "step": 21180 |
| }, |
| { |
| "epoch": 11.88, |
| "learning_rate": 2.4287318361955084e-05, |
| "loss": 0.2889, |
| "step": 21190 |
| }, |
| { |
| "epoch": 11.89, |
| "learning_rate": 2.4284487639177207e-05, |
| "loss": 0.2966, |
| "step": 21200 |
| }, |
| { |
| "epoch": 11.9, |
| "learning_rate": 2.4281656916399323e-05, |
| "loss": 0.3001, |
| "step": 21210 |
| }, |
| { |
| "epoch": 11.9, |
| "learning_rate": 2.427882619362144e-05, |
| "loss": 0.3016, |
| "step": 21220 |
| }, |
| { |
| "epoch": 11.91, |
| "learning_rate": 2.4275995470843554e-05, |
| "loss": 0.2961, |
| "step": 21230 |
| }, |
| { |
| "epoch": 11.91, |
| "learning_rate": 2.4273164748065673e-05, |
| "loss": 0.2886, |
| "step": 21240 |
| }, |
| { |
| "epoch": 11.92, |
| "learning_rate": 2.4270334025287793e-05, |
| "loss": 0.2856, |
| "step": 21250 |
| }, |
| { |
| "epoch": 11.92, |
| "learning_rate": 2.426750330250991e-05, |
| "loss": 0.3151, |
| "step": 21260 |
| }, |
| { |
| "epoch": 11.93, |
| "learning_rate": 2.4264672579732024e-05, |
| "loss": 0.2924, |
| "step": 21270 |
| }, |
| { |
| "epoch": 11.93, |
| "learning_rate": 2.4261841856954143e-05, |
| "loss": 0.2857, |
| "step": 21280 |
| }, |
| { |
| "epoch": 11.94, |
| "learning_rate": 2.4259011134176263e-05, |
| "loss": 0.2975, |
| "step": 21290 |
| }, |
| { |
| "epoch": 11.95, |
| "learning_rate": 2.425618041139838e-05, |
| "loss": 0.2924, |
| "step": 21300 |
| }, |
| { |
| "epoch": 11.95, |
| "learning_rate": 2.4253349688620494e-05, |
| "loss": 0.2966, |
| "step": 21310 |
| }, |
| { |
| "epoch": 11.96, |
| "learning_rate": 2.4250518965842613e-05, |
| "loss": 0.2847, |
| "step": 21320 |
| }, |
| { |
| "epoch": 11.96, |
| "learning_rate": 2.424768824306473e-05, |
| "loss": 0.3017, |
| "step": 21330 |
| }, |
| { |
| "epoch": 11.97, |
| "learning_rate": 2.424485752028685e-05, |
| "loss": 0.3086, |
| "step": 21340 |
| }, |
| { |
| "epoch": 11.97, |
| "learning_rate": 2.4242026797508964e-05, |
| "loss": 0.2809, |
| "step": 21350 |
| }, |
| { |
| "epoch": 11.98, |
| "learning_rate": 2.423919607473108e-05, |
| "loss": 0.3053, |
| "step": 21360 |
| }, |
| { |
| "epoch": 11.99, |
| "learning_rate": 2.42363653519532e-05, |
| "loss": 0.3115, |
| "step": 21370 |
| }, |
| { |
| "epoch": 11.99, |
| "learning_rate": 2.423353462917532e-05, |
| "loss": 0.2898, |
| "step": 21380 |
| }, |
| { |
| "epoch": 12.0, |
| "learning_rate": 2.4230703906397434e-05, |
| "loss": 0.3036, |
| "step": 21390 |
| }, |
| { |
| "epoch": 12.0, |
| "eval_cer": 0.16641946615828135, |
| "eval_loss": 0.4191434979438782, |
| "eval_runtime": 318.6388, |
| "eval_samples_per_second": 16.705, |
| "eval_steps_per_second": 4.177, |
| "eval_wer": 0.22667746371115102, |
| "step": 21396 |
| }, |
| { |
| "epoch": 12.0, |
| "learning_rate": 2.422787318361955e-05, |
| "loss": 0.3229, |
| "step": 21400 |
| }, |
| { |
| "epoch": 12.01, |
| "learning_rate": 2.422504246084167e-05, |
| "loss": 0.2716, |
| "step": 21410 |
| }, |
| { |
| "epoch": 12.01, |
| "learning_rate": 2.4222211738063785e-05, |
| "loss": 0.2866, |
| "step": 21420 |
| }, |
| { |
| "epoch": 12.02, |
| "learning_rate": 2.4219381015285904e-05, |
| "loss": 0.2747, |
| "step": 21430 |
| }, |
| { |
| "epoch": 12.02, |
| "learning_rate": 2.421655029250802e-05, |
| "loss": 0.2919, |
| "step": 21440 |
| }, |
| { |
| "epoch": 12.03, |
| "learning_rate": 2.421371956973014e-05, |
| "loss": 0.2802, |
| "step": 21450 |
| }, |
| { |
| "epoch": 12.04, |
| "learning_rate": 2.4210888846952255e-05, |
| "loss": 0.2794, |
| "step": 21460 |
| }, |
| { |
| "epoch": 12.04, |
| "learning_rate": 2.4208058124174374e-05, |
| "loss": 0.2688, |
| "step": 21470 |
| }, |
| { |
| "epoch": 12.05, |
| "learning_rate": 2.420522740139649e-05, |
| "loss": 0.2882, |
| "step": 21480 |
| }, |
| { |
| "epoch": 12.05, |
| "learning_rate": 2.420239667861861e-05, |
| "loss": 0.2793, |
| "step": 21490 |
| }, |
| { |
| "epoch": 12.06, |
| "learning_rate": 2.4199565955840725e-05, |
| "loss": 0.2818, |
| "step": 21500 |
| }, |
| { |
| "epoch": 12.06, |
| "learning_rate": 2.419673523306284e-05, |
| "loss": 0.2771, |
| "step": 21510 |
| }, |
| { |
| "epoch": 12.07, |
| "learning_rate": 2.419390451028496e-05, |
| "loss": 0.2772, |
| "step": 21520 |
| }, |
| { |
| "epoch": 12.08, |
| "learning_rate": 2.419107378750708e-05, |
| "loss": 0.279, |
| "step": 21530 |
| }, |
| { |
| "epoch": 12.08, |
| "learning_rate": 2.4188243064729195e-05, |
| "loss": 0.2984, |
| "step": 21540 |
| }, |
| { |
| "epoch": 12.09, |
| "learning_rate": 2.418541234195131e-05, |
| "loss": 0.2903, |
| "step": 21550 |
| }, |
| { |
| "epoch": 12.09, |
| "learning_rate": 2.418258161917343e-05, |
| "loss": 0.2889, |
| "step": 21560 |
| }, |
| { |
| "epoch": 12.1, |
| "learning_rate": 2.417975089639555e-05, |
| "loss": 0.2919, |
| "step": 21570 |
| }, |
| { |
| "epoch": 12.1, |
| "learning_rate": 2.4176920173617665e-05, |
| "loss": 0.2822, |
| "step": 21580 |
| }, |
| { |
| "epoch": 12.11, |
| "learning_rate": 2.417408945083978e-05, |
| "loss": 0.283, |
| "step": 21590 |
| }, |
| { |
| "epoch": 12.11, |
| "learning_rate": 2.4171258728061896e-05, |
| "loss": 0.285, |
| "step": 21600 |
| }, |
| { |
| "epoch": 12.12, |
| "learning_rate": 2.416842800528402e-05, |
| "loss": 0.2812, |
| "step": 21610 |
| }, |
| { |
| "epoch": 12.13, |
| "learning_rate": 2.4165597282506135e-05, |
| "loss": 0.287, |
| "step": 21620 |
| }, |
| { |
| "epoch": 12.13, |
| "learning_rate": 2.416276655972825e-05, |
| "loss": 0.2736, |
| "step": 21630 |
| }, |
| { |
| "epoch": 12.14, |
| "learning_rate": 2.4159935836950366e-05, |
| "loss": 0.3008, |
| "step": 21640 |
| }, |
| { |
| "epoch": 12.14, |
| "learning_rate": 2.415710511417249e-05, |
| "loss": 0.2906, |
| "step": 21650 |
| }, |
| { |
| "epoch": 12.15, |
| "learning_rate": 2.4154274391394605e-05, |
| "loss": 0.2797, |
| "step": 21660 |
| }, |
| { |
| "epoch": 12.15, |
| "learning_rate": 2.415144366861672e-05, |
| "loss": 0.2935, |
| "step": 21670 |
| }, |
| { |
| "epoch": 12.16, |
| "learning_rate": 2.4148612945838836e-05, |
| "loss": 0.2997, |
| "step": 21680 |
| }, |
| { |
| "epoch": 12.16, |
| "learning_rate": 2.4145782223060956e-05, |
| "loss": 0.293, |
| "step": 21690 |
| }, |
| { |
| "epoch": 12.17, |
| "learning_rate": 2.4142951500283075e-05, |
| "loss": 0.2727, |
| "step": 21700 |
| }, |
| { |
| "epoch": 12.18, |
| "learning_rate": 2.414012077750519e-05, |
| "loss": 0.2904, |
| "step": 21710 |
| }, |
| { |
| "epoch": 12.18, |
| "learning_rate": 2.4137290054727306e-05, |
| "loss": 0.2712, |
| "step": 21720 |
| }, |
| { |
| "epoch": 12.19, |
| "learning_rate": 2.4134459331949426e-05, |
| "loss": 0.2687, |
| "step": 21730 |
| }, |
| { |
| "epoch": 12.19, |
| "learning_rate": 2.4131628609171545e-05, |
| "loss": 0.2952, |
| "step": 21740 |
| }, |
| { |
| "epoch": 12.2, |
| "learning_rate": 2.412879788639366e-05, |
| "loss": 0.2658, |
| "step": 21750 |
| }, |
| { |
| "epoch": 12.2, |
| "learning_rate": 2.4125967163615776e-05, |
| "loss": 0.2799, |
| "step": 21760 |
| }, |
| { |
| "epoch": 12.21, |
| "learning_rate": 2.4123136440837892e-05, |
| "loss": 0.2742, |
| "step": 21770 |
| }, |
| { |
| "epoch": 12.22, |
| "learning_rate": 2.412030571806001e-05, |
| "loss": 0.2789, |
| "step": 21780 |
| }, |
| { |
| "epoch": 12.22, |
| "learning_rate": 2.411747499528213e-05, |
| "loss": 0.2811, |
| "step": 21790 |
| }, |
| { |
| "epoch": 12.23, |
| "learning_rate": 2.4114644272504246e-05, |
| "loss": 0.2871, |
| "step": 21800 |
| }, |
| { |
| "epoch": 12.23, |
| "learning_rate": 2.4111813549726362e-05, |
| "loss": 0.2845, |
| "step": 21810 |
| }, |
| { |
| "epoch": 12.24, |
| "learning_rate": 2.410898282694848e-05, |
| "loss": 0.2887, |
| "step": 21820 |
| }, |
| { |
| "epoch": 12.24, |
| "learning_rate": 2.41061521041706e-05, |
| "loss": 0.2771, |
| "step": 21830 |
| }, |
| { |
| "epoch": 12.25, |
| "learning_rate": 2.4103321381392716e-05, |
| "loss": 0.2985, |
| "step": 21840 |
| }, |
| { |
| "epoch": 12.25, |
| "learning_rate": 2.4100490658614832e-05, |
| "loss": 0.3182, |
| "step": 21850 |
| }, |
| { |
| "epoch": 12.26, |
| "learning_rate": 2.409765993583695e-05, |
| "loss": 0.2751, |
| "step": 21860 |
| }, |
| { |
| "epoch": 12.27, |
| "learning_rate": 2.4094829213059067e-05, |
| "loss": 0.2831, |
| "step": 21870 |
| }, |
| { |
| "epoch": 12.27, |
| "learning_rate": 2.4091998490281186e-05, |
| "loss": 0.292, |
| "step": 21880 |
| }, |
| { |
| "epoch": 12.28, |
| "learning_rate": 2.4089167767503302e-05, |
| "loss": 0.2863, |
| "step": 21890 |
| }, |
| { |
| "epoch": 12.28, |
| "learning_rate": 2.408633704472542e-05, |
| "loss": 0.2989, |
| "step": 21900 |
| }, |
| { |
| "epoch": 12.29, |
| "learning_rate": 2.4083506321947537e-05, |
| "loss": 0.293, |
| "step": 21910 |
| }, |
| { |
| "epoch": 12.29, |
| "learning_rate": 2.4080675599169656e-05, |
| "loss": 0.2686, |
| "step": 21920 |
| }, |
| { |
| "epoch": 12.3, |
| "learning_rate": 2.4077844876391772e-05, |
| "loss": 0.2726, |
| "step": 21930 |
| }, |
| { |
| "epoch": 12.3, |
| "learning_rate": 2.407501415361389e-05, |
| "loss": 0.3041, |
| "step": 21940 |
| }, |
| { |
| "epoch": 12.31, |
| "learning_rate": 2.4072183430836007e-05, |
| "loss": 0.2794, |
| "step": 21950 |
| }, |
| { |
| "epoch": 12.32, |
| "learning_rate": 2.4069352708058123e-05, |
| "loss": 0.2826, |
| "step": 21960 |
| }, |
| { |
| "epoch": 12.32, |
| "learning_rate": 2.4066521985280242e-05, |
| "loss": 0.3017, |
| "step": 21970 |
| }, |
| { |
| "epoch": 12.33, |
| "learning_rate": 2.406369126250236e-05, |
| "loss": 0.2746, |
| "step": 21980 |
| }, |
| { |
| "epoch": 12.33, |
| "learning_rate": 2.4060860539724477e-05, |
| "loss": 0.2982, |
| "step": 21990 |
| }, |
| { |
| "epoch": 12.34, |
| "learning_rate": 2.4058029816946593e-05, |
| "loss": 0.2892, |
| "step": 22000 |
| }, |
| { |
| "epoch": 12.34, |
| "learning_rate": 2.4055199094168712e-05, |
| "loss": 0.2981, |
| "step": 22010 |
| }, |
| { |
| "epoch": 12.35, |
| "learning_rate": 2.405236837139083e-05, |
| "loss": 0.2768, |
| "step": 22020 |
| }, |
| { |
| "epoch": 12.36, |
| "learning_rate": 2.4049537648612947e-05, |
| "loss": 0.29, |
| "step": 22030 |
| }, |
| { |
| "epoch": 12.36, |
| "learning_rate": 2.4046706925835063e-05, |
| "loss": 0.298, |
| "step": 22040 |
| }, |
| { |
| "epoch": 12.37, |
| "learning_rate": 2.404387620305718e-05, |
| "loss": 0.2818, |
| "step": 22050 |
| }, |
| { |
| "epoch": 12.37, |
| "learning_rate": 2.40410454802793e-05, |
| "loss": 0.2818, |
| "step": 22060 |
| }, |
| { |
| "epoch": 12.38, |
| "learning_rate": 2.4038214757501417e-05, |
| "loss": 0.2759, |
| "step": 22070 |
| }, |
| { |
| "epoch": 12.38, |
| "learning_rate": 2.4035384034723533e-05, |
| "loss": 0.2814, |
| "step": 22080 |
| }, |
| { |
| "epoch": 12.39, |
| "learning_rate": 2.403255331194565e-05, |
| "loss": 0.2933, |
| "step": 22090 |
| }, |
| { |
| "epoch": 12.39, |
| "learning_rate": 2.402972258916777e-05, |
| "loss": 0.2778, |
| "step": 22100 |
| }, |
| { |
| "epoch": 12.4, |
| "learning_rate": 2.4026891866389887e-05, |
| "loss": 0.2856, |
| "step": 22110 |
| }, |
| { |
| "epoch": 12.41, |
| "learning_rate": 2.4024061143612003e-05, |
| "loss": 0.2708, |
| "step": 22120 |
| }, |
| { |
| "epoch": 12.41, |
| "learning_rate": 2.402123042083412e-05, |
| "loss": 0.2808, |
| "step": 22130 |
| }, |
| { |
| "epoch": 12.42, |
| "learning_rate": 2.4018399698056238e-05, |
| "loss": 0.2796, |
| "step": 22140 |
| }, |
| { |
| "epoch": 12.42, |
| "learning_rate": 2.4015568975278357e-05, |
| "loss": 0.2748, |
| "step": 22150 |
| }, |
| { |
| "epoch": 12.43, |
| "learning_rate": 2.4012738252500473e-05, |
| "loss": 0.2937, |
| "step": 22160 |
| }, |
| { |
| "epoch": 12.43, |
| "learning_rate": 2.400990752972259e-05, |
| "loss": 0.2882, |
| "step": 22170 |
| }, |
| { |
| "epoch": 12.44, |
| "learning_rate": 2.4007076806944704e-05, |
| "loss": 0.2774, |
| "step": 22180 |
| }, |
| { |
| "epoch": 12.45, |
| "learning_rate": 2.4004246084166827e-05, |
| "loss": 0.2831, |
| "step": 22190 |
| }, |
| { |
| "epoch": 12.45, |
| "learning_rate": 2.4001415361388943e-05, |
| "loss": 0.2943, |
| "step": 22200 |
| }, |
| { |
| "epoch": 12.46, |
| "learning_rate": 2.399858463861106e-05, |
| "loss": 0.2915, |
| "step": 22210 |
| }, |
| { |
| "epoch": 12.46, |
| "learning_rate": 2.3995753915833174e-05, |
| "loss": 0.2928, |
| "step": 22220 |
| }, |
| { |
| "epoch": 12.47, |
| "learning_rate": 2.3992923193055293e-05, |
| "loss": 0.2722, |
| "step": 22230 |
| }, |
| { |
| "epoch": 12.47, |
| "learning_rate": 2.3990092470277413e-05, |
| "loss": 0.2979, |
| "step": 22240 |
| }, |
| { |
| "epoch": 12.48, |
| "learning_rate": 2.398726174749953e-05, |
| "loss": 0.2872, |
| "step": 22250 |
| }, |
| { |
| "epoch": 12.48, |
| "learning_rate": 2.3984431024721644e-05, |
| "loss": 0.2935, |
| "step": 22260 |
| }, |
| { |
| "epoch": 12.49, |
| "learning_rate": 2.3981600301943763e-05, |
| "loss": 0.2787, |
| "step": 22270 |
| }, |
| { |
| "epoch": 12.5, |
| "learning_rate": 2.3978769579165883e-05, |
| "loss": 0.2801, |
| "step": 22280 |
| }, |
| { |
| "epoch": 12.5, |
| "learning_rate": 2.3975938856388e-05, |
| "loss": 0.2902, |
| "step": 22290 |
| }, |
| { |
| "epoch": 12.51, |
| "learning_rate": 2.3973108133610114e-05, |
| "loss": 0.2909, |
| "step": 22300 |
| }, |
| { |
| "epoch": 12.51, |
| "learning_rate": 2.3970277410832233e-05, |
| "loss": 0.285, |
| "step": 22310 |
| }, |
| { |
| "epoch": 12.52, |
| "learning_rate": 2.396744668805435e-05, |
| "loss": 0.269, |
| "step": 22320 |
| }, |
| { |
| "epoch": 12.52, |
| "learning_rate": 2.396461596527647e-05, |
| "loss": 0.2743, |
| "step": 22330 |
| }, |
| { |
| "epoch": 12.53, |
| "learning_rate": 2.3961785242498584e-05, |
| "loss": 0.2837, |
| "step": 22340 |
| }, |
| { |
| "epoch": 12.53, |
| "learning_rate": 2.3958954519720703e-05, |
| "loss": 0.3011, |
| "step": 22350 |
| }, |
| { |
| "epoch": 12.54, |
| "learning_rate": 2.395612379694282e-05, |
| "loss": 0.2894, |
| "step": 22360 |
| }, |
| { |
| "epoch": 12.55, |
| "learning_rate": 2.395329307416494e-05, |
| "loss": 0.2877, |
| "step": 22370 |
| }, |
| { |
| "epoch": 12.55, |
| "learning_rate": 2.3950462351387054e-05, |
| "loss": 0.2849, |
| "step": 22380 |
| }, |
| { |
| "epoch": 12.56, |
| "learning_rate": 2.3947631628609173e-05, |
| "loss": 0.3123, |
| "step": 22390 |
| }, |
| { |
| "epoch": 12.56, |
| "learning_rate": 2.394480090583129e-05, |
| "loss": 0.2912, |
| "step": 22400 |
| }, |
| { |
| "epoch": 12.57, |
| "learning_rate": 2.3941970183053405e-05, |
| "loss": 0.2788, |
| "step": 22410 |
| }, |
| { |
| "epoch": 12.57, |
| "learning_rate": 2.3939139460275524e-05, |
| "loss": 0.268, |
| "step": 22420 |
| }, |
| { |
| "epoch": 12.58, |
| "learning_rate": 2.3936308737497643e-05, |
| "loss": 0.2856, |
| "step": 22430 |
| }, |
| { |
| "epoch": 12.59, |
| "learning_rate": 2.393347801471976e-05, |
| "loss": 0.2883, |
| "step": 22440 |
| }, |
| { |
| "epoch": 12.59, |
| "learning_rate": 2.3930647291941875e-05, |
| "loss": 0.2865, |
| "step": 22450 |
| }, |
| { |
| "epoch": 12.6, |
| "learning_rate": 2.3927816569163994e-05, |
| "loss": 0.2903, |
| "step": 22460 |
| }, |
| { |
| "epoch": 12.6, |
| "learning_rate": 2.3924985846386113e-05, |
| "loss": 0.2696, |
| "step": 22470 |
| }, |
| { |
| "epoch": 12.61, |
| "learning_rate": 2.392215512360823e-05, |
| "loss": 0.2847, |
| "step": 22480 |
| }, |
| { |
| "epoch": 12.61, |
| "learning_rate": 2.3919324400830345e-05, |
| "loss": 0.2847, |
| "step": 22490 |
| }, |
| { |
| "epoch": 12.62, |
| "learning_rate": 2.391649367805246e-05, |
| "loss": 0.306, |
| "step": 22500 |
| }, |
| { |
| "epoch": 12.62, |
| "learning_rate": 2.3913662955274583e-05, |
| "loss": 0.2978, |
| "step": 22510 |
| }, |
| { |
| "epoch": 12.63, |
| "learning_rate": 2.39108322324967e-05, |
| "loss": 0.2843, |
| "step": 22520 |
| }, |
| { |
| "epoch": 12.64, |
| "learning_rate": 2.3908001509718815e-05, |
| "loss": 0.2824, |
| "step": 22530 |
| }, |
| { |
| "epoch": 12.64, |
| "learning_rate": 2.390517078694093e-05, |
| "loss": 0.2868, |
| "step": 22540 |
| }, |
| { |
| "epoch": 12.65, |
| "learning_rate": 2.3902340064163053e-05, |
| "loss": 0.2865, |
| "step": 22550 |
| }, |
| { |
| "epoch": 12.65, |
| "learning_rate": 2.389950934138517e-05, |
| "loss": 0.2942, |
| "step": 22560 |
| }, |
| { |
| "epoch": 12.66, |
| "learning_rate": 2.3896678618607285e-05, |
| "loss": 0.2893, |
| "step": 22570 |
| }, |
| { |
| "epoch": 12.66, |
| "learning_rate": 2.38938478958294e-05, |
| "loss": 0.2856, |
| "step": 22580 |
| }, |
| { |
| "epoch": 12.67, |
| "learning_rate": 2.3891017173051517e-05, |
| "loss": 0.2783, |
| "step": 22590 |
| }, |
| { |
| "epoch": 12.67, |
| "learning_rate": 2.388818645027364e-05, |
| "loss": 0.3056, |
| "step": 22600 |
| }, |
| { |
| "epoch": 12.68, |
| "learning_rate": 2.3885355727495755e-05, |
| "loss": 0.2856, |
| "step": 22610 |
| }, |
| { |
| "epoch": 12.69, |
| "learning_rate": 2.388252500471787e-05, |
| "loss": 0.2896, |
| "step": 22620 |
| }, |
| { |
| "epoch": 12.69, |
| "learning_rate": 2.3879694281939986e-05, |
| "loss": 0.2817, |
| "step": 22630 |
| }, |
| { |
| "epoch": 12.7, |
| "learning_rate": 2.387686355916211e-05, |
| "loss": 0.2818, |
| "step": 22640 |
| }, |
| { |
| "epoch": 12.7, |
| "learning_rate": 2.3874032836384225e-05, |
| "loss": 0.2982, |
| "step": 22650 |
| }, |
| { |
| "epoch": 12.71, |
| "learning_rate": 2.387120211360634e-05, |
| "loss": 0.2841, |
| "step": 22660 |
| }, |
| { |
| "epoch": 12.71, |
| "learning_rate": 2.3868371390828456e-05, |
| "loss": 0.2934, |
| "step": 22670 |
| }, |
| { |
| "epoch": 12.72, |
| "learning_rate": 2.3865540668050576e-05, |
| "loss": 0.2914, |
| "step": 22680 |
| }, |
| { |
| "epoch": 12.73, |
| "learning_rate": 2.3862709945272695e-05, |
| "loss": 0.3091, |
| "step": 22690 |
| }, |
| { |
| "epoch": 12.73, |
| "learning_rate": 2.385987922249481e-05, |
| "loss": 0.2817, |
| "step": 22700 |
| }, |
| { |
| "epoch": 12.74, |
| "learning_rate": 2.3857048499716926e-05, |
| "loss": 0.2847, |
| "step": 22710 |
| }, |
| { |
| "epoch": 12.74, |
| "learning_rate": 2.3854217776939046e-05, |
| "loss": 0.283, |
| "step": 22720 |
| }, |
| { |
| "epoch": 12.75, |
| "learning_rate": 2.3851387054161165e-05, |
| "loss": 0.2789, |
| "step": 22730 |
| }, |
| { |
| "epoch": 12.75, |
| "learning_rate": 2.384855633138328e-05, |
| "loss": 0.2988, |
| "step": 22740 |
| }, |
| { |
| "epoch": 12.76, |
| "learning_rate": 2.3845725608605396e-05, |
| "loss": 0.2935, |
| "step": 22750 |
| }, |
| { |
| "epoch": 12.76, |
| "learning_rate": 2.3843177958105305e-05, |
| "loss": 0.2849, |
| "step": 22760 |
| }, |
| { |
| "epoch": 12.77, |
| "learning_rate": 2.384034723532742e-05, |
| "loss": 0.2795, |
| "step": 22770 |
| }, |
| { |
| "epoch": 12.78, |
| "learning_rate": 2.3837516512549537e-05, |
| "loss": 0.2834, |
| "step": 22780 |
| }, |
| { |
| "epoch": 12.78, |
| "learning_rate": 2.3834685789771653e-05, |
| "loss": 0.2683, |
| "step": 22790 |
| }, |
| { |
| "epoch": 12.79, |
| "learning_rate": 2.3831855066993775e-05, |
| "loss": 0.2952, |
| "step": 22800 |
| }, |
| { |
| "epoch": 12.79, |
| "learning_rate": 2.382902434421589e-05, |
| "loss": 0.2923, |
| "step": 22810 |
| }, |
| { |
| "epoch": 12.8, |
| "learning_rate": 2.3826193621438007e-05, |
| "loss": 0.2966, |
| "step": 22820 |
| }, |
| { |
| "epoch": 12.8, |
| "learning_rate": 2.3823362898660123e-05, |
| "loss": 0.2774, |
| "step": 22830 |
| }, |
| { |
| "epoch": 12.81, |
| "learning_rate": 2.3820532175882242e-05, |
| "loss": 0.2642, |
| "step": 22840 |
| }, |
| { |
| "epoch": 12.82, |
| "learning_rate": 2.381770145310436e-05, |
| "loss": 0.291, |
| "step": 22850 |
| }, |
| { |
| "epoch": 12.82, |
| "learning_rate": 2.3814870730326477e-05, |
| "loss": 0.2769, |
| "step": 22860 |
| }, |
| { |
| "epoch": 12.83, |
| "learning_rate": 2.3812040007548593e-05, |
| "loss": 0.289, |
| "step": 22870 |
| }, |
| { |
| "epoch": 12.83, |
| "learning_rate": 2.3809209284770712e-05, |
| "loss": 0.287, |
| "step": 22880 |
| }, |
| { |
| "epoch": 12.84, |
| "learning_rate": 2.380637856199283e-05, |
| "loss": 0.2739, |
| "step": 22890 |
| }, |
| { |
| "epoch": 12.84, |
| "learning_rate": 2.3803547839214947e-05, |
| "loss": 0.2931, |
| "step": 22900 |
| }, |
| { |
| "epoch": 12.85, |
| "learning_rate": 2.3800717116437063e-05, |
| "loss": 0.2912, |
| "step": 22910 |
| }, |
| { |
| "epoch": 12.85, |
| "learning_rate": 2.3797886393659182e-05, |
| "loss": 0.2801, |
| "step": 22920 |
| }, |
| { |
| "epoch": 12.86, |
| "learning_rate": 2.3795055670881298e-05, |
| "loss": 0.2785, |
| "step": 22930 |
| }, |
| { |
| "epoch": 12.87, |
| "learning_rate": 2.3792224948103417e-05, |
| "loss": 0.2941, |
| "step": 22940 |
| }, |
| { |
| "epoch": 12.87, |
| "learning_rate": 2.3789394225325533e-05, |
| "loss": 0.2852, |
| "step": 22950 |
| }, |
| { |
| "epoch": 12.88, |
| "learning_rate": 2.3786563502547652e-05, |
| "loss": 0.279, |
| "step": 22960 |
| }, |
| { |
| "epoch": 12.88, |
| "learning_rate": 2.3783732779769768e-05, |
| "loss": 0.2781, |
| "step": 22970 |
| }, |
| { |
| "epoch": 12.89, |
| "learning_rate": 2.3780902056991887e-05, |
| "loss": 0.2805, |
| "step": 22980 |
| }, |
| { |
| "epoch": 12.89, |
| "learning_rate": 2.3778071334214003e-05, |
| "loss": 0.297, |
| "step": 22990 |
| }, |
| { |
| "epoch": 12.9, |
| "learning_rate": 2.3775240611436122e-05, |
| "loss": 0.2874, |
| "step": 23000 |
| }, |
| { |
| "epoch": 12.9, |
| "learning_rate": 2.3772409888658238e-05, |
| "loss": 0.2737, |
| "step": 23010 |
| }, |
| { |
| "epoch": 12.91, |
| "learning_rate": 2.3769579165880357e-05, |
| "loss": 0.278, |
| "step": 23020 |
| }, |
| { |
| "epoch": 12.92, |
| "learning_rate": 2.3766748443102473e-05, |
| "loss": 0.2913, |
| "step": 23030 |
| }, |
| { |
| "epoch": 12.92, |
| "learning_rate": 2.3763917720324592e-05, |
| "loss": 0.2916, |
| "step": 23040 |
| }, |
| { |
| "epoch": 12.93, |
| "learning_rate": 2.3761086997546708e-05, |
| "loss": 0.2749, |
| "step": 23050 |
| }, |
| { |
| "epoch": 12.93, |
| "learning_rate": 2.3758256274768823e-05, |
| "loss": 0.2839, |
| "step": 23060 |
| }, |
| { |
| "epoch": 12.94, |
| "learning_rate": 2.3755425551990943e-05, |
| "loss": 0.2764, |
| "step": 23070 |
| }, |
| { |
| "epoch": 12.94, |
| "learning_rate": 2.3752594829213062e-05, |
| "loss": 0.2854, |
| "step": 23080 |
| }, |
| { |
| "epoch": 12.95, |
| "learning_rate": 2.3749764106435178e-05, |
| "loss": 0.2923, |
| "step": 23090 |
| }, |
| { |
| "epoch": 12.96, |
| "learning_rate": 2.3746933383657293e-05, |
| "loss": 0.3001, |
| "step": 23100 |
| }, |
| { |
| "epoch": 12.96, |
| "learning_rate": 2.3744102660879413e-05, |
| "loss": 0.2886, |
| "step": 23110 |
| }, |
| { |
| "epoch": 12.97, |
| "learning_rate": 2.3741271938101532e-05, |
| "loss": 0.2883, |
| "step": 23120 |
| }, |
| { |
| "epoch": 12.97, |
| "learning_rate": 2.3738441215323648e-05, |
| "loss": 0.2955, |
| "step": 23130 |
| }, |
| { |
| "epoch": 12.98, |
| "learning_rate": 2.3735610492545763e-05, |
| "loss": 0.2943, |
| "step": 23140 |
| }, |
| { |
| "epoch": 12.98, |
| "learning_rate": 2.373277976976788e-05, |
| "loss": 0.3136, |
| "step": 23150 |
| }, |
| { |
| "epoch": 12.99, |
| "learning_rate": 2.372994904699e-05, |
| "loss": 0.2925, |
| "step": 23160 |
| }, |
| { |
| "epoch": 12.99, |
| "learning_rate": 2.3727118324212118e-05, |
| "loss": 0.2762, |
| "step": 23170 |
| }, |
| { |
| "epoch": 13.0, |
| "eval_cer": 0.16842800896233331, |
| "eval_loss": 0.4157847464084625, |
| "eval_runtime": 320.1707, |
| "eval_samples_per_second": 16.626, |
| "eval_steps_per_second": 4.157, |
| "eval_wer": 0.22711653183506814, |
| "step": 23179 |
| }, |
| { |
| "epoch": 13.0, |
| "learning_rate": 2.3724287601434233e-05, |
| "loss": 0.3163, |
| "step": 23180 |
| }, |
| { |
| "epoch": 13.01, |
| "learning_rate": 2.372145687865635e-05, |
| "loss": 0.2676, |
| "step": 23190 |
| }, |
| { |
| "epoch": 13.01, |
| "learning_rate": 2.371862615587847e-05, |
| "loss": 0.2792, |
| "step": 23200 |
| }, |
| { |
| "epoch": 13.02, |
| "learning_rate": 2.3715795433100588e-05, |
| "loss": 0.2555, |
| "step": 23210 |
| }, |
| { |
| "epoch": 13.02, |
| "learning_rate": 2.3712964710322703e-05, |
| "loss": 0.2732, |
| "step": 23220 |
| }, |
| { |
| "epoch": 13.03, |
| "learning_rate": 2.371013398754482e-05, |
| "loss": 0.2574, |
| "step": 23230 |
| }, |
| { |
| "epoch": 13.03, |
| "learning_rate": 2.3707303264766935e-05, |
| "loss": 0.2897, |
| "step": 23240 |
| }, |
| { |
| "epoch": 13.04, |
| "learning_rate": 2.3704472541989058e-05, |
| "loss": 0.2722, |
| "step": 23250 |
| }, |
| { |
| "epoch": 13.05, |
| "learning_rate": 2.3701641819211173e-05, |
| "loss": 0.2655, |
| "step": 23260 |
| }, |
| { |
| "epoch": 13.05, |
| "learning_rate": 2.369881109643329e-05, |
| "loss": 0.2783, |
| "step": 23270 |
| }, |
| { |
| "epoch": 13.06, |
| "learning_rate": 2.3695980373655405e-05, |
| "loss": 0.2829, |
| "step": 23280 |
| }, |
| { |
| "epoch": 13.06, |
| "learning_rate": 2.3693149650877528e-05, |
| "loss": 0.2796, |
| "step": 23290 |
| }, |
| { |
| "epoch": 13.07, |
| "learning_rate": 2.3690318928099643e-05, |
| "loss": 0.268, |
| "step": 23300 |
| }, |
| { |
| "epoch": 13.07, |
| "learning_rate": 2.368748820532176e-05, |
| "loss": 0.268, |
| "step": 23310 |
| }, |
| { |
| "epoch": 13.08, |
| "learning_rate": 2.3684657482543875e-05, |
| "loss": 0.2747, |
| "step": 23320 |
| }, |
| { |
| "epoch": 13.08, |
| "learning_rate": 2.3681826759765994e-05, |
| "loss": 0.2653, |
| "step": 23330 |
| }, |
| { |
| "epoch": 13.09, |
| "learning_rate": 2.3678996036988113e-05, |
| "loss": 0.2763, |
| "step": 23340 |
| }, |
| { |
| "epoch": 13.1, |
| "learning_rate": 2.367616531421023e-05, |
| "loss": 0.2731, |
| "step": 23350 |
| }, |
| { |
| "epoch": 13.1, |
| "learning_rate": 2.3673334591432345e-05, |
| "loss": 0.2515, |
| "step": 23360 |
| }, |
| { |
| "epoch": 13.11, |
| "learning_rate": 2.3670503868654464e-05, |
| "loss": 0.2802, |
| "step": 23370 |
| }, |
| { |
| "epoch": 13.11, |
| "learning_rate": 2.3667673145876583e-05, |
| "loss": 0.2783, |
| "step": 23380 |
| }, |
| { |
| "epoch": 13.12, |
| "learning_rate": 2.36648424230987e-05, |
| "loss": 0.2721, |
| "step": 23390 |
| }, |
| { |
| "epoch": 13.12, |
| "learning_rate": 2.3662011700320815e-05, |
| "loss": 0.2682, |
| "step": 23400 |
| }, |
| { |
| "epoch": 13.13, |
| "learning_rate": 2.3659180977542934e-05, |
| "loss": 0.283, |
| "step": 23410 |
| }, |
| { |
| "epoch": 13.14, |
| "learning_rate": 2.365635025476505e-05, |
| "loss": 0.2562, |
| "step": 23420 |
| }, |
| { |
| "epoch": 13.14, |
| "learning_rate": 2.365351953198717e-05, |
| "loss": 0.2775, |
| "step": 23430 |
| }, |
| { |
| "epoch": 13.15, |
| "learning_rate": 2.3650688809209285e-05, |
| "loss": 0.2836, |
| "step": 23440 |
| }, |
| { |
| "epoch": 13.15, |
| "learning_rate": 2.3647858086431404e-05, |
| "loss": 0.258, |
| "step": 23450 |
| }, |
| { |
| "epoch": 13.16, |
| "learning_rate": 2.364502736365352e-05, |
| "loss": 0.2635, |
| "step": 23460 |
| }, |
| { |
| "epoch": 13.16, |
| "learning_rate": 2.364219664087564e-05, |
| "loss": 0.2784, |
| "step": 23470 |
| }, |
| { |
| "epoch": 13.17, |
| "learning_rate": 2.3639365918097755e-05, |
| "loss": 0.2779, |
| "step": 23480 |
| }, |
| { |
| "epoch": 13.17, |
| "learning_rate": 2.3636535195319874e-05, |
| "loss": 0.2818, |
| "step": 23490 |
| }, |
| { |
| "epoch": 13.18, |
| "learning_rate": 2.363370447254199e-05, |
| "loss": 0.2758, |
| "step": 23500 |
| }, |
| { |
| "epoch": 13.19, |
| "learning_rate": 2.3630873749764106e-05, |
| "loss": 0.2924, |
| "step": 23510 |
| }, |
| { |
| "epoch": 13.19, |
| "learning_rate": 2.3628043026986225e-05, |
| "loss": 0.2723, |
| "step": 23520 |
| }, |
| { |
| "epoch": 13.2, |
| "learning_rate": 2.3625212304208344e-05, |
| "loss": 0.273, |
| "step": 23530 |
| }, |
| { |
| "epoch": 13.2, |
| "learning_rate": 2.362238158143046e-05, |
| "loss": 0.2818, |
| "step": 23540 |
| }, |
| { |
| "epoch": 13.21, |
| "learning_rate": 2.3619550858652576e-05, |
| "loss": 0.2804, |
| "step": 23550 |
| }, |
| { |
| "epoch": 13.21, |
| "learning_rate": 2.3616720135874695e-05, |
| "loss": 0.2827, |
| "step": 23560 |
| }, |
| { |
| "epoch": 13.22, |
| "learning_rate": 2.361388941309681e-05, |
| "loss": 0.2748, |
| "step": 23570 |
| }, |
| { |
| "epoch": 13.22, |
| "learning_rate": 2.361105869031893e-05, |
| "loss": 0.2838, |
| "step": 23580 |
| }, |
| { |
| "epoch": 13.23, |
| "learning_rate": 2.3608227967541046e-05, |
| "loss": 0.2732, |
| "step": 23590 |
| }, |
| { |
| "epoch": 13.24, |
| "learning_rate": 2.360539724476316e-05, |
| "loss": 0.2704, |
| "step": 23600 |
| }, |
| { |
| "epoch": 13.24, |
| "learning_rate": 2.360256652198528e-05, |
| "loss": 0.2797, |
| "step": 23610 |
| }, |
| { |
| "epoch": 13.25, |
| "learning_rate": 2.35997357992074e-05, |
| "loss": 0.2717, |
| "step": 23620 |
| }, |
| { |
| "epoch": 13.25, |
| "learning_rate": 2.3596905076429516e-05, |
| "loss": 0.2857, |
| "step": 23630 |
| }, |
| { |
| "epoch": 13.26, |
| "learning_rate": 2.359407435365163e-05, |
| "loss": 0.2906, |
| "step": 23640 |
| }, |
| { |
| "epoch": 13.26, |
| "learning_rate": 2.359124363087375e-05, |
| "loss": 0.2772, |
| "step": 23650 |
| }, |
| { |
| "epoch": 13.27, |
| "learning_rate": 2.358841290809587e-05, |
| "loss": 0.2695, |
| "step": 23660 |
| }, |
| { |
| "epoch": 13.28, |
| "learning_rate": 2.3585582185317986e-05, |
| "loss": 0.2705, |
| "step": 23670 |
| }, |
| { |
| "epoch": 13.28, |
| "learning_rate": 2.35827514625401e-05, |
| "loss": 0.2945, |
| "step": 23680 |
| }, |
| { |
| "epoch": 13.29, |
| "learning_rate": 2.3579920739762217e-05, |
| "loss": 0.2795, |
| "step": 23690 |
| }, |
| { |
| "epoch": 13.29, |
| "learning_rate": 2.357709001698434e-05, |
| "loss": 0.2822, |
| "step": 23700 |
| }, |
| { |
| "epoch": 13.3, |
| "learning_rate": 2.3574259294206455e-05, |
| "loss": 0.2663, |
| "step": 23710 |
| }, |
| { |
| "epoch": 13.3, |
| "learning_rate": 2.357142857142857e-05, |
| "loss": 0.2704, |
| "step": 23720 |
| }, |
| { |
| "epoch": 13.31, |
| "learning_rate": 2.3568597848650687e-05, |
| "loss": 0.2604, |
| "step": 23730 |
| }, |
| { |
| "epoch": 13.31, |
| "learning_rate": 2.356576712587281e-05, |
| "loss": 0.2628, |
| "step": 23740 |
| }, |
| { |
| "epoch": 13.32, |
| "learning_rate": 2.3562936403094925e-05, |
| "loss": 0.2769, |
| "step": 23750 |
| }, |
| { |
| "epoch": 13.33, |
| "learning_rate": 2.356010568031704e-05, |
| "loss": 0.2671, |
| "step": 23760 |
| }, |
| { |
| "epoch": 13.33, |
| "learning_rate": 2.3557274957539157e-05, |
| "loss": 0.274, |
| "step": 23770 |
| }, |
| { |
| "epoch": 13.34, |
| "learning_rate": 2.3554444234761276e-05, |
| "loss": 0.2835, |
| "step": 23780 |
| }, |
| { |
| "epoch": 13.34, |
| "learning_rate": 2.3551613511983395e-05, |
| "loss": 0.2829, |
| "step": 23790 |
| }, |
| { |
| "epoch": 13.35, |
| "learning_rate": 2.354878278920551e-05, |
| "loss": 0.2809, |
| "step": 23800 |
| }, |
| { |
| "epoch": 13.35, |
| "learning_rate": 2.3545952066427627e-05, |
| "loss": 0.2727, |
| "step": 23810 |
| }, |
| { |
| "epoch": 13.36, |
| "learning_rate": 2.3543121343649746e-05, |
| "loss": 0.2828, |
| "step": 23820 |
| }, |
| { |
| "epoch": 13.36, |
| "learning_rate": 2.3540290620871865e-05, |
| "loss": 0.2757, |
| "step": 23830 |
| }, |
| { |
| "epoch": 13.37, |
| "learning_rate": 2.353745989809398e-05, |
| "loss": 0.281, |
| "step": 23840 |
| }, |
| { |
| "epoch": 13.38, |
| "learning_rate": 2.3534629175316097e-05, |
| "loss": 0.2696, |
| "step": 23850 |
| }, |
| { |
| "epoch": 13.38, |
| "learning_rate": 2.3531798452538216e-05, |
| "loss": 0.2795, |
| "step": 23860 |
| }, |
| { |
| "epoch": 13.39, |
| "learning_rate": 2.3528967729760332e-05, |
| "loss": 0.2644, |
| "step": 23870 |
| }, |
| { |
| "epoch": 13.39, |
| "learning_rate": 2.352613700698245e-05, |
| "loss": 0.2801, |
| "step": 23880 |
| }, |
| { |
| "epoch": 13.4, |
| "learning_rate": 2.3523306284204567e-05, |
| "loss": 0.2677, |
| "step": 23890 |
| }, |
| { |
| "epoch": 13.4, |
| "learning_rate": 2.3520475561426686e-05, |
| "loss": 0.2889, |
| "step": 23900 |
| }, |
| { |
| "epoch": 13.41, |
| "learning_rate": 2.3517644838648802e-05, |
| "loss": 0.2778, |
| "step": 23910 |
| }, |
| { |
| "epoch": 13.42, |
| "learning_rate": 2.351481411587092e-05, |
| "loss": 0.2713, |
| "step": 23920 |
| }, |
| { |
| "epoch": 13.42, |
| "learning_rate": 2.3511983393093037e-05, |
| "loss": 0.2684, |
| "step": 23930 |
| }, |
| { |
| "epoch": 13.43, |
| "learning_rate": 2.3509152670315156e-05, |
| "loss": 0.2659, |
| "step": 23940 |
| }, |
| { |
| "epoch": 13.43, |
| "learning_rate": 2.3506321947537272e-05, |
| "loss": 0.2734, |
| "step": 23950 |
| }, |
| { |
| "epoch": 13.44, |
| "learning_rate": 2.3503491224759388e-05, |
| "loss": 0.269, |
| "step": 23960 |
| }, |
| { |
| "epoch": 13.44, |
| "learning_rate": 2.3500660501981507e-05, |
| "loss": 0.2752, |
| "step": 23970 |
| }, |
| { |
| "epoch": 13.45, |
| "learning_rate": 2.3497829779203623e-05, |
| "loss": 0.2732, |
| "step": 23980 |
| }, |
| { |
| "epoch": 13.45, |
| "learning_rate": 2.3494999056425742e-05, |
| "loss": 0.2709, |
| "step": 23990 |
| }, |
| { |
| "epoch": 13.46, |
| "learning_rate": 2.3492168333647858e-05, |
| "loss": 0.2614, |
| "step": 24000 |
| }, |
| { |
| "epoch": 13.47, |
| "learning_rate": 2.3489337610869977e-05, |
| "loss": 0.2818, |
| "step": 24010 |
| }, |
| { |
| "epoch": 13.47, |
| "learning_rate": 2.3486506888092093e-05, |
| "loss": 0.268, |
| "step": 24020 |
| }, |
| { |
| "epoch": 13.48, |
| "learning_rate": 2.3483676165314212e-05, |
| "loss": 0.2868, |
| "step": 24030 |
| }, |
| { |
| "epoch": 13.48, |
| "learning_rate": 2.3480845442536328e-05, |
| "loss": 0.2824, |
| "step": 24040 |
| }, |
| { |
| "epoch": 13.49, |
| "learning_rate": 2.3478014719758444e-05, |
| "loss": 0.2756, |
| "step": 24050 |
| }, |
| { |
| "epoch": 13.49, |
| "learning_rate": 2.3475183996980563e-05, |
| "loss": 0.2732, |
| "step": 24060 |
| }, |
| { |
| "epoch": 13.5, |
| "learning_rate": 2.3472353274202682e-05, |
| "loss": 0.2738, |
| "step": 24070 |
| }, |
| { |
| "epoch": 13.51, |
| "learning_rate": 2.3469522551424798e-05, |
| "loss": 0.2679, |
| "step": 24080 |
| }, |
| { |
| "epoch": 13.51, |
| "learning_rate": 2.3466691828646914e-05, |
| "loss": 0.2774, |
| "step": 24090 |
| }, |
| { |
| "epoch": 13.52, |
| "learning_rate": 2.3463861105869033e-05, |
| "loss": 0.2819, |
| "step": 24100 |
| }, |
| { |
| "epoch": 13.52, |
| "learning_rate": 2.3461030383091152e-05, |
| "loss": 0.2814, |
| "step": 24110 |
| }, |
| { |
| "epoch": 13.53, |
| "learning_rate": 2.3458199660313268e-05, |
| "loss": 0.2782, |
| "step": 24120 |
| }, |
| { |
| "epoch": 13.53, |
| "learning_rate": 2.3455368937535383e-05, |
| "loss": 0.2744, |
| "step": 24130 |
| }, |
| { |
| "epoch": 13.54, |
| "learning_rate": 2.34525382147575e-05, |
| "loss": 0.2847, |
| "step": 24140 |
| }, |
| { |
| "epoch": 13.54, |
| "learning_rate": 2.3449707491979622e-05, |
| "loss": 0.2788, |
| "step": 24150 |
| }, |
| { |
| "epoch": 13.55, |
| "learning_rate": 2.3446876769201738e-05, |
| "loss": 0.2727, |
| "step": 24160 |
| }, |
| { |
| "epoch": 13.56, |
| "learning_rate": 2.3444046046423853e-05, |
| "loss": 0.2788, |
| "step": 24170 |
| }, |
| { |
| "epoch": 13.56, |
| "learning_rate": 2.344121532364597e-05, |
| "loss": 0.2791, |
| "step": 24180 |
| }, |
| { |
| "epoch": 13.57, |
| "learning_rate": 2.3438384600868092e-05, |
| "loss": 0.2769, |
| "step": 24190 |
| }, |
| { |
| "epoch": 13.57, |
| "learning_rate": 2.3435553878090208e-05, |
| "loss": 0.2803, |
| "step": 24200 |
| }, |
| { |
| "epoch": 13.58, |
| "learning_rate": 2.3432723155312323e-05, |
| "loss": 0.2645, |
| "step": 24210 |
| }, |
| { |
| "epoch": 13.58, |
| "learning_rate": 2.342989243253444e-05, |
| "loss": 0.2722, |
| "step": 24220 |
| }, |
| { |
| "epoch": 13.59, |
| "learning_rate": 2.342706170975656e-05, |
| "loss": 0.269, |
| "step": 24230 |
| }, |
| { |
| "epoch": 13.59, |
| "learning_rate": 2.3424230986978678e-05, |
| "loss": 0.2706, |
| "step": 24240 |
| }, |
| { |
| "epoch": 13.6, |
| "learning_rate": 2.3421400264200793e-05, |
| "loss": 0.2703, |
| "step": 24250 |
| }, |
| { |
| "epoch": 13.61, |
| "learning_rate": 2.341856954142291e-05, |
| "loss": 0.2619, |
| "step": 24260 |
| }, |
| { |
| "epoch": 13.61, |
| "learning_rate": 2.341573881864503e-05, |
| "loss": 0.2724, |
| "step": 24270 |
| }, |
| { |
| "epoch": 13.62, |
| "learning_rate": 2.3412908095867148e-05, |
| "loss": 0.2707, |
| "step": 24280 |
| }, |
| { |
| "epoch": 13.62, |
| "learning_rate": 2.3410077373089263e-05, |
| "loss": 0.2797, |
| "step": 24290 |
| }, |
| { |
| "epoch": 13.63, |
| "learning_rate": 2.340724665031138e-05, |
| "loss": 0.2702, |
| "step": 24300 |
| }, |
| { |
| "epoch": 13.63, |
| "learning_rate": 2.34044159275335e-05, |
| "loss": 0.2773, |
| "step": 24310 |
| }, |
| { |
| "epoch": 13.64, |
| "learning_rate": 2.3401585204755614e-05, |
| "loss": 0.278, |
| "step": 24320 |
| }, |
| { |
| "epoch": 13.65, |
| "learning_rate": 2.3398754481977733e-05, |
| "loss": 0.2676, |
| "step": 24330 |
| }, |
| { |
| "epoch": 13.65, |
| "learning_rate": 2.339592375919985e-05, |
| "loss": 0.2731, |
| "step": 24340 |
| }, |
| { |
| "epoch": 13.66, |
| "learning_rate": 2.339309303642197e-05, |
| "loss": 0.2761, |
| "step": 24350 |
| }, |
| { |
| "epoch": 13.66, |
| "learning_rate": 2.3390262313644084e-05, |
| "loss": 0.2786, |
| "step": 24360 |
| }, |
| { |
| "epoch": 13.67, |
| "learning_rate": 2.3387431590866203e-05, |
| "loss": 0.2676, |
| "step": 24370 |
| }, |
| { |
| "epoch": 13.67, |
| "learning_rate": 2.338460086808832e-05, |
| "loss": 0.275, |
| "step": 24380 |
| }, |
| { |
| "epoch": 13.68, |
| "learning_rate": 2.3381770145310435e-05, |
| "loss": 0.2733, |
| "step": 24390 |
| }, |
| { |
| "epoch": 13.68, |
| "learning_rate": 2.3378939422532554e-05, |
| "loss": 0.2728, |
| "step": 24400 |
| }, |
| { |
| "epoch": 13.69, |
| "learning_rate": 2.337610869975467e-05, |
| "loss": 0.2552, |
| "step": 24410 |
| }, |
| { |
| "epoch": 13.7, |
| "learning_rate": 2.337327797697679e-05, |
| "loss": 0.2804, |
| "step": 24420 |
| }, |
| { |
| "epoch": 13.7, |
| "learning_rate": 2.3370447254198905e-05, |
| "loss": 0.2832, |
| "step": 24430 |
| }, |
| { |
| "epoch": 13.71, |
| "learning_rate": 2.3367616531421024e-05, |
| "loss": 0.2856, |
| "step": 24440 |
| }, |
| { |
| "epoch": 13.71, |
| "learning_rate": 2.336478580864314e-05, |
| "loss": 0.2757, |
| "step": 24450 |
| }, |
| { |
| "epoch": 13.72, |
| "learning_rate": 2.336195508586526e-05, |
| "loss": 0.2851, |
| "step": 24460 |
| }, |
| { |
| "epoch": 13.72, |
| "learning_rate": 2.3359124363087375e-05, |
| "loss": 0.2784, |
| "step": 24470 |
| }, |
| { |
| "epoch": 13.73, |
| "learning_rate": 2.3356293640309494e-05, |
| "loss": 0.2731, |
| "step": 24480 |
| }, |
| { |
| "epoch": 13.73, |
| "learning_rate": 2.33537459898094e-05, |
| "loss": 0.2689, |
| "step": 24490 |
| }, |
| { |
| "epoch": 13.74, |
| "learning_rate": 2.3350915267031515e-05, |
| "loss": 0.2833, |
| "step": 24500 |
| }, |
| { |
| "epoch": 13.75, |
| "learning_rate": 2.334808454425363e-05, |
| "loss": 0.283, |
| "step": 24510 |
| }, |
| { |
| "epoch": 13.75, |
| "learning_rate": 2.334525382147575e-05, |
| "loss": 0.2834, |
| "step": 24520 |
| }, |
| { |
| "epoch": 13.76, |
| "learning_rate": 2.334242309869787e-05, |
| "loss": 0.2882, |
| "step": 24530 |
| }, |
| { |
| "epoch": 13.76, |
| "learning_rate": 2.3339592375919985e-05, |
| "loss": 0.2832, |
| "step": 24540 |
| }, |
| { |
| "epoch": 13.77, |
| "learning_rate": 2.33367616531421e-05, |
| "loss": 0.2815, |
| "step": 24550 |
| }, |
| { |
| "epoch": 13.77, |
| "learning_rate": 2.333393093036422e-05, |
| "loss": 0.2571, |
| "step": 24560 |
| }, |
| { |
| "epoch": 13.78, |
| "learning_rate": 2.3331100207586336e-05, |
| "loss": 0.2733, |
| "step": 24570 |
| }, |
| { |
| "epoch": 13.79, |
| "learning_rate": 2.3328269484808455e-05, |
| "loss": 0.2938, |
| "step": 24580 |
| }, |
| { |
| "epoch": 13.79, |
| "learning_rate": 2.332543876203057e-05, |
| "loss": 0.2778, |
| "step": 24590 |
| }, |
| { |
| "epoch": 13.8, |
| "learning_rate": 2.332260803925269e-05, |
| "loss": 0.2772, |
| "step": 24600 |
| }, |
| { |
| "epoch": 13.8, |
| "learning_rate": 2.3319777316474806e-05, |
| "loss": 0.277, |
| "step": 24610 |
| }, |
| { |
| "epoch": 13.81, |
| "learning_rate": 2.3316946593696925e-05, |
| "loss": 0.287, |
| "step": 24620 |
| }, |
| { |
| "epoch": 13.81, |
| "learning_rate": 2.331411587091904e-05, |
| "loss": 0.2772, |
| "step": 24630 |
| }, |
| { |
| "epoch": 13.82, |
| "learning_rate": 2.331128514814116e-05, |
| "loss": 0.2877, |
| "step": 24640 |
| }, |
| { |
| "epoch": 13.82, |
| "learning_rate": 2.3308454425363276e-05, |
| "loss": 0.262, |
| "step": 24650 |
| }, |
| { |
| "epoch": 13.83, |
| "learning_rate": 2.3305623702585392e-05, |
| "loss": 0.2867, |
| "step": 24660 |
| }, |
| { |
| "epoch": 13.84, |
| "learning_rate": 2.330279297980751e-05, |
| "loss": 0.2871, |
| "step": 24670 |
| }, |
| { |
| "epoch": 13.84, |
| "learning_rate": 2.329996225702963e-05, |
| "loss": 0.2778, |
| "step": 24680 |
| }, |
| { |
| "epoch": 13.85, |
| "learning_rate": 2.3297131534251746e-05, |
| "loss": 0.267, |
| "step": 24690 |
| }, |
| { |
| "epoch": 13.85, |
| "learning_rate": 2.3294300811473862e-05, |
| "loss": 0.2773, |
| "step": 24700 |
| }, |
| { |
| "epoch": 13.86, |
| "learning_rate": 2.329147008869598e-05, |
| "loss": 0.2753, |
| "step": 24710 |
| }, |
| { |
| "epoch": 13.86, |
| "learning_rate": 2.32886393659181e-05, |
| "loss": 0.2818, |
| "step": 24720 |
| }, |
| { |
| "epoch": 13.87, |
| "learning_rate": 2.3285808643140216e-05, |
| "loss": 0.2684, |
| "step": 24730 |
| }, |
| { |
| "epoch": 13.88, |
| "learning_rate": 2.3282977920362332e-05, |
| "loss": 0.2772, |
| "step": 24740 |
| }, |
| { |
| "epoch": 13.88, |
| "learning_rate": 2.3280147197584448e-05, |
| "loss": 0.2822, |
| "step": 24750 |
| }, |
| { |
| "epoch": 13.89, |
| "learning_rate": 2.327731647480657e-05, |
| "loss": 0.2749, |
| "step": 24760 |
| }, |
| { |
| "epoch": 13.89, |
| "learning_rate": 2.3274485752028686e-05, |
| "loss": 0.2731, |
| "step": 24770 |
| }, |
| { |
| "epoch": 13.9, |
| "learning_rate": 2.3271655029250802e-05, |
| "loss": 0.2858, |
| "step": 24780 |
| }, |
| { |
| "epoch": 13.9, |
| "learning_rate": 2.3268824306472918e-05, |
| "loss": 0.2733, |
| "step": 24790 |
| }, |
| { |
| "epoch": 13.91, |
| "learning_rate": 2.326599358369504e-05, |
| "loss": 0.2559, |
| "step": 24800 |
| }, |
| { |
| "epoch": 13.91, |
| "learning_rate": 2.3263162860917156e-05, |
| "loss": 0.2794, |
| "step": 24810 |
| }, |
| { |
| "epoch": 13.92, |
| "learning_rate": 2.3260332138139272e-05, |
| "loss": 0.2717, |
| "step": 24820 |
| }, |
| { |
| "epoch": 13.93, |
| "learning_rate": 2.3257501415361388e-05, |
| "loss": 0.294, |
| "step": 24830 |
| }, |
| { |
| "epoch": 13.93, |
| "learning_rate": 2.3254670692583507e-05, |
| "loss": 0.2877, |
| "step": 24840 |
| }, |
| { |
| "epoch": 13.94, |
| "learning_rate": 2.3251839969805626e-05, |
| "loss": 0.2721, |
| "step": 24850 |
| }, |
| { |
| "epoch": 13.94, |
| "learning_rate": 2.3249009247027742e-05, |
| "loss": 0.2617, |
| "step": 24860 |
| }, |
| { |
| "epoch": 13.95, |
| "learning_rate": 2.3246178524249858e-05, |
| "loss": 0.2858, |
| "step": 24870 |
| }, |
| { |
| "epoch": 13.95, |
| "learning_rate": 2.3243347801471977e-05, |
| "loss": 0.2845, |
| "step": 24880 |
| }, |
| { |
| "epoch": 13.96, |
| "learning_rate": 2.3240517078694096e-05, |
| "loss": 0.2817, |
| "step": 24890 |
| }, |
| { |
| "epoch": 13.96, |
| "learning_rate": 2.3237686355916212e-05, |
| "loss": 0.2689, |
| "step": 24900 |
| }, |
| { |
| "epoch": 13.97, |
| "learning_rate": 2.3234855633138328e-05, |
| "loss": 0.2796, |
| "step": 24910 |
| }, |
| { |
| "epoch": 13.98, |
| "learning_rate": 2.3232024910360443e-05, |
| "loss": 0.2703, |
| "step": 24920 |
| }, |
| { |
| "epoch": 13.98, |
| "learning_rate": 2.3229194187582563e-05, |
| "loss": 0.2782, |
| "step": 24930 |
| }, |
| { |
| "epoch": 13.99, |
| "learning_rate": 2.3226363464804682e-05, |
| "loss": 0.2766, |
| "step": 24940 |
| }, |
| { |
| "epoch": 13.99, |
| "learning_rate": 2.3223532742026798e-05, |
| "loss": 0.2632, |
| "step": 24950 |
| }, |
| { |
| "epoch": 14.0, |
| "learning_rate": 2.3220702019248913e-05, |
| "loss": 0.283, |
| "step": 24960 |
| }, |
| { |
| "epoch": 14.0, |
| "eval_cer": 0.16598230180490403, |
| "eval_loss": 0.4149094521999359, |
| "eval_runtime": 314.9705, |
| "eval_samples_per_second": 16.9, |
| "eval_steps_per_second": 4.226, |
| "eval_wer": 0.2242954534101396, |
| "step": 24962 |
| }, |
| { |
| "epoch": 14.0, |
| "learning_rate": 2.3217871296471033e-05, |
| "loss": 0.2797, |
| "step": 24970 |
| }, |
| { |
| "epoch": 14.01, |
| "learning_rate": 2.3215040573693152e-05, |
| "loss": 0.255, |
| "step": 24980 |
| }, |
| { |
| "epoch": 14.02, |
| "learning_rate": 2.3212209850915268e-05, |
| "loss": 0.2687, |
| "step": 24990 |
| }, |
| { |
| "epoch": 14.02, |
| "learning_rate": 2.3209379128137383e-05, |
| "loss": 0.2775, |
| "step": 25000 |
| }, |
| { |
| "epoch": 14.03, |
| "learning_rate": 2.3206548405359503e-05, |
| "loss": 0.2769, |
| "step": 25010 |
| }, |
| { |
| "epoch": 14.03, |
| "learning_rate": 2.320371768258162e-05, |
| "loss": 0.2545, |
| "step": 25020 |
| }, |
| { |
| "epoch": 14.04, |
| "learning_rate": 2.3200886959803738e-05, |
| "loss": 0.2645, |
| "step": 25030 |
| }, |
| { |
| "epoch": 14.04, |
| "learning_rate": 2.3198056237025853e-05, |
| "loss": 0.2737, |
| "step": 25040 |
| }, |
| { |
| "epoch": 14.05, |
| "learning_rate": 2.3195225514247973e-05, |
| "loss": 0.2572, |
| "step": 25050 |
| }, |
| { |
| "epoch": 14.05, |
| "learning_rate": 2.319239479147009e-05, |
| "loss": 0.2671, |
| "step": 25060 |
| }, |
| { |
| "epoch": 14.06, |
| "learning_rate": 2.3189564068692208e-05, |
| "loss": 0.2557, |
| "step": 25070 |
| }, |
| { |
| "epoch": 14.07, |
| "learning_rate": 2.3186733345914323e-05, |
| "loss": 0.267, |
| "step": 25080 |
| }, |
| { |
| "epoch": 14.07, |
| "learning_rate": 2.3183902623136443e-05, |
| "loss": 0.2641, |
| "step": 25090 |
| }, |
| { |
| "epoch": 14.08, |
| "learning_rate": 2.318107190035856e-05, |
| "loss": 0.2452, |
| "step": 25100 |
| }, |
| { |
| "epoch": 14.08, |
| "learning_rate": 2.3178241177580674e-05, |
| "loss": 0.2618, |
| "step": 25110 |
| }, |
| { |
| "epoch": 14.09, |
| "learning_rate": 2.3175410454802793e-05, |
| "loss": 0.2617, |
| "step": 25120 |
| }, |
| { |
| "epoch": 14.09, |
| "learning_rate": 2.3172579732024913e-05, |
| "loss": 0.2763, |
| "step": 25130 |
| }, |
| { |
| "epoch": 14.1, |
| "learning_rate": 2.316974900924703e-05, |
| "loss": 0.2515, |
| "step": 25140 |
| }, |
| { |
| "epoch": 14.11, |
| "learning_rate": 2.3166918286469144e-05, |
| "loss": 0.2667, |
| "step": 25150 |
| }, |
| { |
| "epoch": 14.11, |
| "learning_rate": 2.3164087563691263e-05, |
| "loss": 0.2516, |
| "step": 25160 |
| }, |
| { |
| "epoch": 14.12, |
| "learning_rate": 2.3161256840913383e-05, |
| "loss": 0.2481, |
| "step": 25170 |
| }, |
| { |
| "epoch": 14.12, |
| "learning_rate": 2.31584261181355e-05, |
| "loss": 0.2627, |
| "step": 25180 |
| }, |
| { |
| "epoch": 14.13, |
| "learning_rate": 2.3155595395357614e-05, |
| "loss": 0.264, |
| "step": 25190 |
| }, |
| { |
| "epoch": 14.13, |
| "learning_rate": 2.315276467257973e-05, |
| "loss": 0.2597, |
| "step": 25200 |
| }, |
| { |
| "epoch": 14.14, |
| "learning_rate": 2.3149933949801852e-05, |
| "loss": 0.2595, |
| "step": 25210 |
| }, |
| { |
| "epoch": 14.14, |
| "learning_rate": 2.3147103227023968e-05, |
| "loss": 0.2645, |
| "step": 25220 |
| }, |
| { |
| "epoch": 14.15, |
| "learning_rate": 2.3144272504246084e-05, |
| "loss": 0.2557, |
| "step": 25230 |
| }, |
| { |
| "epoch": 14.16, |
| "learning_rate": 2.31414417814682e-05, |
| "loss": 0.2538, |
| "step": 25240 |
| }, |
| { |
| "epoch": 14.16, |
| "learning_rate": 2.3138611058690322e-05, |
| "loss": 0.2512, |
| "step": 25250 |
| }, |
| { |
| "epoch": 14.17, |
| "learning_rate": 2.3135780335912438e-05, |
| "loss": 0.2648, |
| "step": 25260 |
| }, |
| { |
| "epoch": 14.17, |
| "learning_rate": 2.3132949613134554e-05, |
| "loss": 0.2661, |
| "step": 25270 |
| }, |
| { |
| "epoch": 14.18, |
| "learning_rate": 2.313011889035667e-05, |
| "loss": 0.2731, |
| "step": 25280 |
| }, |
| { |
| "epoch": 14.18, |
| "learning_rate": 2.312728816757879e-05, |
| "loss": 0.2703, |
| "step": 25290 |
| }, |
| { |
| "epoch": 14.19, |
| "learning_rate": 2.3124457444800908e-05, |
| "loss": 0.2626, |
| "step": 25300 |
| }, |
| { |
| "epoch": 14.2, |
| "learning_rate": 2.3121626722023024e-05, |
| "loss": 0.269, |
| "step": 25310 |
| }, |
| { |
| "epoch": 14.2, |
| "learning_rate": 2.311879599924514e-05, |
| "loss": 0.2647, |
| "step": 25320 |
| }, |
| { |
| "epoch": 14.21, |
| "learning_rate": 2.311596527646726e-05, |
| "loss": 0.2688, |
| "step": 25330 |
| }, |
| { |
| "epoch": 14.21, |
| "learning_rate": 2.3113134553689378e-05, |
| "loss": 0.266, |
| "step": 25340 |
| }, |
| { |
| "epoch": 14.22, |
| "learning_rate": 2.3110303830911494e-05, |
| "loss": 0.2605, |
| "step": 25350 |
| }, |
| { |
| "epoch": 14.22, |
| "learning_rate": 2.310747310813361e-05, |
| "loss": 0.2802, |
| "step": 25360 |
| }, |
| { |
| "epoch": 14.23, |
| "learning_rate": 2.3104642385355726e-05, |
| "loss": 0.2762, |
| "step": 25370 |
| }, |
| { |
| "epoch": 14.23, |
| "learning_rate": 2.3101811662577845e-05, |
| "loss": 0.2732, |
| "step": 25380 |
| }, |
| { |
| "epoch": 14.24, |
| "learning_rate": 2.3098980939799964e-05, |
| "loss": 0.2562, |
| "step": 25390 |
| }, |
| { |
| "epoch": 14.25, |
| "learning_rate": 2.309615021702208e-05, |
| "loss": 0.2643, |
| "step": 25400 |
| }, |
| { |
| "epoch": 14.25, |
| "learning_rate": 2.3093319494244196e-05, |
| "loss": 0.2659, |
| "step": 25410 |
| }, |
| { |
| "epoch": 14.26, |
| "learning_rate": 2.3090488771466315e-05, |
| "loss": 0.2602, |
| "step": 25420 |
| }, |
| { |
| "epoch": 14.26, |
| "learning_rate": 2.3087658048688434e-05, |
| "loss": 0.2583, |
| "step": 25430 |
| }, |
| { |
| "epoch": 14.27, |
| "learning_rate": 2.308482732591055e-05, |
| "loss": 0.2621, |
| "step": 25440 |
| }, |
| { |
| "epoch": 14.27, |
| "learning_rate": 2.3081996603132666e-05, |
| "loss": 0.2667, |
| "step": 25450 |
| }, |
| { |
| "epoch": 14.28, |
| "learning_rate": 2.3079165880354785e-05, |
| "loss": 0.2733, |
| "step": 25460 |
| }, |
| { |
| "epoch": 14.28, |
| "learning_rate": 2.30763351575769e-05, |
| "loss": 0.2873, |
| "step": 25470 |
| }, |
| { |
| "epoch": 14.29, |
| "learning_rate": 2.307350443479902e-05, |
| "loss": 0.2782, |
| "step": 25480 |
| }, |
| { |
| "epoch": 14.3, |
| "learning_rate": 2.3070673712021136e-05, |
| "loss": 0.2539, |
| "step": 25490 |
| }, |
| { |
| "epoch": 14.3, |
| "learning_rate": 2.3067842989243255e-05, |
| "loss": 0.2653, |
| "step": 25500 |
| }, |
| { |
| "epoch": 14.31, |
| "learning_rate": 2.306501226646537e-05, |
| "loss": 0.2706, |
| "step": 25510 |
| }, |
| { |
| "epoch": 14.31, |
| "learning_rate": 2.306218154368749e-05, |
| "loss": 0.2816, |
| "step": 25520 |
| }, |
| { |
| "epoch": 14.32, |
| "learning_rate": 2.3059350820909606e-05, |
| "loss": 0.2615, |
| "step": 25530 |
| }, |
| { |
| "epoch": 14.32, |
| "learning_rate": 2.3056520098131725e-05, |
| "loss": 0.2709, |
| "step": 25540 |
| }, |
| { |
| "epoch": 14.33, |
| "learning_rate": 2.305368937535384e-05, |
| "loss": 0.268, |
| "step": 25550 |
| }, |
| { |
| "epoch": 14.34, |
| "learning_rate": 2.3050858652575956e-05, |
| "loss": 0.2526, |
| "step": 25560 |
| }, |
| { |
| "epoch": 14.34, |
| "learning_rate": 2.3048027929798076e-05, |
| "loss": 0.2694, |
| "step": 25570 |
| }, |
| { |
| "epoch": 14.35, |
| "learning_rate": 2.3045197207020195e-05, |
| "loss": 0.2712, |
| "step": 25580 |
| }, |
| { |
| "epoch": 14.35, |
| "learning_rate": 2.304236648424231e-05, |
| "loss": 0.2674, |
| "step": 25590 |
| }, |
| { |
| "epoch": 14.36, |
| "learning_rate": 2.3039535761464426e-05, |
| "loss": 0.281, |
| "step": 25600 |
| }, |
| { |
| "epoch": 14.36, |
| "learning_rate": 2.3036705038686545e-05, |
| "loss": 0.2702, |
| "step": 25610 |
| }, |
| { |
| "epoch": 14.37, |
| "learning_rate": 2.3033874315908665e-05, |
| "loss": 0.2522, |
| "step": 25620 |
| }, |
| { |
| "epoch": 14.37, |
| "learning_rate": 2.303104359313078e-05, |
| "loss": 0.273, |
| "step": 25630 |
| }, |
| { |
| "epoch": 14.38, |
| "learning_rate": 2.3028212870352896e-05, |
| "loss": 0.2655, |
| "step": 25640 |
| }, |
| { |
| "epoch": 14.39, |
| "learning_rate": 2.3025382147575012e-05, |
| "loss": 0.2594, |
| "step": 25650 |
| }, |
| { |
| "epoch": 14.39, |
| "learning_rate": 2.3022551424797135e-05, |
| "loss": 0.2536, |
| "step": 25660 |
| }, |
| { |
| "epoch": 14.4, |
| "learning_rate": 2.301972070201925e-05, |
| "loss": 0.2647, |
| "step": 25670 |
| }, |
| { |
| "epoch": 14.4, |
| "learning_rate": 2.3016889979241366e-05, |
| "loss": 0.2672, |
| "step": 25680 |
| }, |
| { |
| "epoch": 14.41, |
| "learning_rate": 2.3014059256463482e-05, |
| "loss": 0.2619, |
| "step": 25690 |
| }, |
| { |
| "epoch": 14.41, |
| "learning_rate": 2.3011228533685605e-05, |
| "loss": 0.258, |
| "step": 25700 |
| }, |
| { |
| "epoch": 14.42, |
| "learning_rate": 2.300839781090772e-05, |
| "loss": 0.2788, |
| "step": 25710 |
| }, |
| { |
| "epoch": 14.42, |
| "learning_rate": 2.3005567088129836e-05, |
| "loss": 0.253, |
| "step": 25720 |
| }, |
| { |
| "epoch": 14.43, |
| "learning_rate": 2.3002736365351952e-05, |
| "loss": 0.2616, |
| "step": 25730 |
| }, |
| { |
| "epoch": 14.44, |
| "learning_rate": 2.299990564257407e-05, |
| "loss": 0.2791, |
| "step": 25740 |
| }, |
| { |
| "epoch": 14.44, |
| "learning_rate": 2.299707491979619e-05, |
| "loss": 0.2546, |
| "step": 25750 |
| }, |
| { |
| "epoch": 14.45, |
| "learning_rate": 2.2994244197018306e-05, |
| "loss": 0.2772, |
| "step": 25760 |
| }, |
| { |
| "epoch": 14.45, |
| "learning_rate": 2.2991413474240422e-05, |
| "loss": 0.2697, |
| "step": 25770 |
| }, |
| { |
| "epoch": 14.46, |
| "learning_rate": 2.2988582751462538e-05, |
| "loss": 0.2672, |
| "step": 25780 |
| }, |
| { |
| "epoch": 14.46, |
| "learning_rate": 2.298575202868466e-05, |
| "loss": 0.2623, |
| "step": 25790 |
| }, |
| { |
| "epoch": 14.47, |
| "learning_rate": 2.2982921305906776e-05, |
| "loss": 0.2632, |
| "step": 25800 |
| }, |
| { |
| "epoch": 14.48, |
| "learning_rate": 2.2980090583128892e-05, |
| "loss": 0.2668, |
| "step": 25810 |
| }, |
| { |
| "epoch": 14.48, |
| "learning_rate": 2.2977259860351008e-05, |
| "loss": 0.287, |
| "step": 25820 |
| }, |
| { |
| "epoch": 14.49, |
| "learning_rate": 2.2974429137573127e-05, |
| "loss": 0.2674, |
| "step": 25830 |
| }, |
| { |
| "epoch": 14.49, |
| "learning_rate": 2.2971598414795246e-05, |
| "loss": 0.2678, |
| "step": 25840 |
| }, |
| { |
| "epoch": 14.5, |
| "learning_rate": 2.2968767692017362e-05, |
| "loss": 0.2581, |
| "step": 25850 |
| }, |
| { |
| "epoch": 14.5, |
| "learning_rate": 2.2965936969239478e-05, |
| "loss": 0.2703, |
| "step": 25860 |
| }, |
| { |
| "epoch": 14.51, |
| "learning_rate": 2.2963106246461597e-05, |
| "loss": 0.2483, |
| "step": 25870 |
| }, |
| { |
| "epoch": 14.51, |
| "learning_rate": 2.2960275523683716e-05, |
| "loss": 0.2731, |
| "step": 25880 |
| }, |
| { |
| "epoch": 14.52, |
| "learning_rate": 2.2957444800905832e-05, |
| "loss": 0.258, |
| "step": 25890 |
| }, |
| { |
| "epoch": 14.53, |
| "learning_rate": 2.2954614078127948e-05, |
| "loss": 0.2615, |
| "step": 25900 |
| }, |
| { |
| "epoch": 14.53, |
| "learning_rate": 2.2951783355350067e-05, |
| "loss": 0.2531, |
| "step": 25910 |
| }, |
| { |
| "epoch": 14.54, |
| "learning_rate": 2.2948952632572183e-05, |
| "loss": 0.2561, |
| "step": 25920 |
| }, |
| { |
| "epoch": 14.54, |
| "learning_rate": 2.2946121909794302e-05, |
| "loss": 0.2772, |
| "step": 25930 |
| }, |
| { |
| "epoch": 14.55, |
| "learning_rate": 2.2943291187016418e-05, |
| "loss": 0.2547, |
| "step": 25940 |
| }, |
| { |
| "epoch": 14.55, |
| "learning_rate": 2.2940460464238537e-05, |
| "loss": 0.2583, |
| "step": 25950 |
| }, |
| { |
| "epoch": 14.56, |
| "learning_rate": 2.2937629741460653e-05, |
| "loss": 0.2605, |
| "step": 25960 |
| }, |
| { |
| "epoch": 14.57, |
| "learning_rate": 2.2934799018682772e-05, |
| "loss": 0.2824, |
| "step": 25970 |
| }, |
| { |
| "epoch": 14.57, |
| "learning_rate": 2.2931968295904888e-05, |
| "loss": 0.2677, |
| "step": 25980 |
| }, |
| { |
| "epoch": 14.58, |
| "learning_rate": 2.2929137573127007e-05, |
| "loss": 0.2661, |
| "step": 25990 |
| }, |
| { |
| "epoch": 14.58, |
| "learning_rate": 2.2926306850349123e-05, |
| "loss": 0.2601, |
| "step": 26000 |
| }, |
| { |
| "epoch": 14.59, |
| "learning_rate": 2.292347612757124e-05, |
| "loss": 0.2821, |
| "step": 26010 |
| }, |
| { |
| "epoch": 14.59, |
| "learning_rate": 2.2920645404793358e-05, |
| "loss": 0.2653, |
| "step": 26020 |
| }, |
| { |
| "epoch": 14.6, |
| "learning_rate": 2.2917814682015477e-05, |
| "loss": 0.257, |
| "step": 26030 |
| }, |
| { |
| "epoch": 14.6, |
| "learning_rate": 2.2914983959237593e-05, |
| "loss": 0.2621, |
| "step": 26040 |
| }, |
| { |
| "epoch": 14.61, |
| "learning_rate": 2.291215323645971e-05, |
| "loss": 0.2651, |
| "step": 26050 |
| }, |
| { |
| "epoch": 14.62, |
| "learning_rate": 2.2909322513681828e-05, |
| "loss": 0.2613, |
| "step": 26060 |
| }, |
| { |
| "epoch": 14.62, |
| "learning_rate": 2.2906491790903947e-05, |
| "loss": 0.2578, |
| "step": 26070 |
| }, |
| { |
| "epoch": 14.63, |
| "learning_rate": 2.2903661068126063e-05, |
| "loss": 0.2661, |
| "step": 26080 |
| }, |
| { |
| "epoch": 14.63, |
| "learning_rate": 2.290083034534818e-05, |
| "loss": 0.2772, |
| "step": 26090 |
| }, |
| { |
| "epoch": 14.64, |
| "learning_rate": 2.2897999622570294e-05, |
| "loss": 0.2782, |
| "step": 26100 |
| }, |
| { |
| "epoch": 14.64, |
| "learning_rate": 2.2895168899792417e-05, |
| "loss": 0.2729, |
| "step": 26110 |
| }, |
| { |
| "epoch": 14.65, |
| "learning_rate": 2.2892338177014533e-05, |
| "loss": 0.2552, |
| "step": 26120 |
| }, |
| { |
| "epoch": 14.65, |
| "learning_rate": 2.288950745423665e-05, |
| "loss": 0.2642, |
| "step": 26130 |
| }, |
| { |
| "epoch": 14.66, |
| "learning_rate": 2.2886676731458764e-05, |
| "loss": 0.2642, |
| "step": 26140 |
| }, |
| { |
| "epoch": 14.67, |
| "learning_rate": 2.2883846008680887e-05, |
| "loss": 0.2644, |
| "step": 26150 |
| }, |
| { |
| "epoch": 14.67, |
| "learning_rate": 2.2881015285903003e-05, |
| "loss": 0.2595, |
| "step": 26160 |
| }, |
| { |
| "epoch": 14.68, |
| "learning_rate": 2.287818456312512e-05, |
| "loss": 0.2626, |
| "step": 26170 |
| }, |
| { |
| "epoch": 14.68, |
| "learning_rate": 2.2875353840347234e-05, |
| "loss": 0.259, |
| "step": 26180 |
| }, |
| { |
| "epoch": 14.69, |
| "learning_rate": 2.287252311756935e-05, |
| "loss": 0.2738, |
| "step": 26190 |
| }, |
| { |
| "epoch": 14.69, |
| "learning_rate": 2.2869692394791473e-05, |
| "loss": 0.2632, |
| "step": 26200 |
| }, |
| { |
| "epoch": 14.7, |
| "learning_rate": 2.286686167201359e-05, |
| "loss": 0.2644, |
| "step": 26210 |
| }, |
| { |
| "epoch": 14.71, |
| "learning_rate": 2.2864030949235704e-05, |
| "loss": 0.2776, |
| "step": 26220 |
| }, |
| { |
| "epoch": 14.71, |
| "learning_rate": 2.286120022645782e-05, |
| "loss": 0.2687, |
| "step": 26230 |
| }, |
| { |
| "epoch": 14.72, |
| "learning_rate": 2.2858369503679943e-05, |
| "loss": 0.2634, |
| "step": 26240 |
| }, |
| { |
| "epoch": 14.72, |
| "learning_rate": 2.285553878090206e-05, |
| "loss": 0.2671, |
| "step": 26250 |
| }, |
| { |
| "epoch": 14.73, |
| "learning_rate": 2.2852708058124174e-05, |
| "loss": 0.2821, |
| "step": 26260 |
| }, |
| { |
| "epoch": 14.73, |
| "learning_rate": 2.284987733534629e-05, |
| "loss": 0.2634, |
| "step": 26270 |
| }, |
| { |
| "epoch": 14.74, |
| "learning_rate": 2.284704661256841e-05, |
| "loss": 0.2604, |
| "step": 26280 |
| }, |
| { |
| "epoch": 14.74, |
| "learning_rate": 2.284421588979053e-05, |
| "loss": 0.2643, |
| "step": 26290 |
| }, |
| { |
| "epoch": 14.75, |
| "learning_rate": 2.2841385167012644e-05, |
| "loss": 0.2793, |
| "step": 26300 |
| }, |
| { |
| "epoch": 14.76, |
| "learning_rate": 2.283855444423476e-05, |
| "loss": 0.269, |
| "step": 26310 |
| }, |
| { |
| "epoch": 14.76, |
| "learning_rate": 2.283572372145688e-05, |
| "loss": 0.2547, |
| "step": 26320 |
| }, |
| { |
| "epoch": 14.77, |
| "learning_rate": 2.2832892998679e-05, |
| "loss": 0.2701, |
| "step": 26330 |
| }, |
| { |
| "epoch": 14.77, |
| "learning_rate": 2.2830062275901114e-05, |
| "loss": 0.261, |
| "step": 26340 |
| }, |
| { |
| "epoch": 14.78, |
| "learning_rate": 2.282723155312323e-05, |
| "loss": 0.2657, |
| "step": 26350 |
| }, |
| { |
| "epoch": 14.78, |
| "learning_rate": 2.282440083034535e-05, |
| "loss": 0.2729, |
| "step": 26360 |
| }, |
| { |
| "epoch": 14.79, |
| "learning_rate": 2.2821570107567465e-05, |
| "loss": 0.2626, |
| "step": 26370 |
| }, |
| { |
| "epoch": 14.79, |
| "learning_rate": 2.2818739384789584e-05, |
| "loss": 0.2634, |
| "step": 26380 |
| }, |
| { |
| "epoch": 14.8, |
| "learning_rate": 2.28159086620117e-05, |
| "loss": 0.2575, |
| "step": 26390 |
| }, |
| { |
| "epoch": 14.81, |
| "learning_rate": 2.281307793923382e-05, |
| "loss": 0.2567, |
| "step": 26400 |
| }, |
| { |
| "epoch": 14.81, |
| "learning_rate": 2.2810247216455935e-05, |
| "loss": 0.2505, |
| "step": 26410 |
| }, |
| { |
| "epoch": 14.82, |
| "learning_rate": 2.2807416493678054e-05, |
| "loss": 0.2718, |
| "step": 26420 |
| }, |
| { |
| "epoch": 14.82, |
| "learning_rate": 2.280458577090017e-05, |
| "loss": 0.2736, |
| "step": 26430 |
| }, |
| { |
| "epoch": 14.83, |
| "learning_rate": 2.280175504812229e-05, |
| "loss": 0.2649, |
| "step": 26440 |
| }, |
| { |
| "epoch": 14.83, |
| "learning_rate": 2.2798924325344405e-05, |
| "loss": 0.2716, |
| "step": 26450 |
| }, |
| { |
| "epoch": 14.84, |
| "learning_rate": 2.279609360256652e-05, |
| "loss": 0.2737, |
| "step": 26460 |
| }, |
| { |
| "epoch": 14.85, |
| "learning_rate": 2.279326287978864e-05, |
| "loss": 0.2598, |
| "step": 26470 |
| }, |
| { |
| "epoch": 14.85, |
| "learning_rate": 2.279043215701076e-05, |
| "loss": 0.2589, |
| "step": 26480 |
| }, |
| { |
| "epoch": 14.86, |
| "learning_rate": 2.2787601434232875e-05, |
| "loss": 0.2528, |
| "step": 26490 |
| }, |
| { |
| "epoch": 14.86, |
| "learning_rate": 2.278477071145499e-05, |
| "loss": 0.2549, |
| "step": 26500 |
| }, |
| { |
| "epoch": 14.87, |
| "learning_rate": 2.278193998867711e-05, |
| "loss": 0.2699, |
| "step": 26510 |
| }, |
| { |
| "epoch": 14.87, |
| "learning_rate": 2.277910926589923e-05, |
| "loss": 0.2797, |
| "step": 26520 |
| }, |
| { |
| "epoch": 14.88, |
| "learning_rate": 2.2776278543121345e-05, |
| "loss": 0.2693, |
| "step": 26530 |
| }, |
| { |
| "epoch": 14.88, |
| "learning_rate": 2.277344782034346e-05, |
| "loss": 0.2644, |
| "step": 26540 |
| }, |
| { |
| "epoch": 14.89, |
| "learning_rate": 2.2770617097565576e-05, |
| "loss": 0.2602, |
| "step": 26550 |
| }, |
| { |
| "epoch": 14.9, |
| "learning_rate": 2.27677863747877e-05, |
| "loss": 0.2665, |
| "step": 26560 |
| }, |
| { |
| "epoch": 14.9, |
| "learning_rate": 2.2764955652009815e-05, |
| "loss": 0.2721, |
| "step": 26570 |
| }, |
| { |
| "epoch": 14.91, |
| "learning_rate": 2.276212492923193e-05, |
| "loss": 0.2792, |
| "step": 26580 |
| }, |
| { |
| "epoch": 14.91, |
| "learning_rate": 2.2759294206454046e-05, |
| "loss": 0.2696, |
| "step": 26590 |
| }, |
| { |
| "epoch": 14.92, |
| "learning_rate": 2.2756463483676166e-05, |
| "loss": 0.2713, |
| "step": 26600 |
| }, |
| { |
| "epoch": 14.92, |
| "learning_rate": 2.2753632760898285e-05, |
| "loss": 0.2757, |
| "step": 26610 |
| }, |
| { |
| "epoch": 14.93, |
| "learning_rate": 2.27508020381204e-05, |
| "loss": 0.2787, |
| "step": 26620 |
| }, |
| { |
| "epoch": 14.94, |
| "learning_rate": 2.2747971315342516e-05, |
| "loss": 0.2729, |
| "step": 26630 |
| }, |
| { |
| "epoch": 14.94, |
| "learning_rate": 2.2745140592564632e-05, |
| "loss": 0.2669, |
| "step": 26640 |
| }, |
| { |
| "epoch": 14.95, |
| "learning_rate": 2.2742309869786755e-05, |
| "loss": 0.2586, |
| "step": 26650 |
| }, |
| { |
| "epoch": 14.95, |
| "learning_rate": 2.273947914700887e-05, |
| "loss": 0.2759, |
| "step": 26660 |
| }, |
| { |
| "epoch": 14.96, |
| "learning_rate": 2.2736648424230986e-05, |
| "loss": 0.2638, |
| "step": 26670 |
| }, |
| { |
| "epoch": 14.96, |
| "learning_rate": 2.2733817701453102e-05, |
| "loss": 0.2677, |
| "step": 26680 |
| }, |
| { |
| "epoch": 14.97, |
| "learning_rate": 2.2730986978675225e-05, |
| "loss": 0.2588, |
| "step": 26690 |
| }, |
| { |
| "epoch": 14.97, |
| "learning_rate": 2.272815625589734e-05, |
| "loss": 0.2602, |
| "step": 26700 |
| }, |
| { |
| "epoch": 14.98, |
| "learning_rate": 2.2725325533119456e-05, |
| "loss": 0.2756, |
| "step": 26710 |
| }, |
| { |
| "epoch": 14.99, |
| "learning_rate": 2.2722494810341572e-05, |
| "loss": 0.2836, |
| "step": 26720 |
| }, |
| { |
| "epoch": 14.99, |
| "learning_rate": 2.271966408756369e-05, |
| "loss": 0.2586, |
| "step": 26730 |
| }, |
| { |
| "epoch": 15.0, |
| "learning_rate": 2.271683336478581e-05, |
| "loss": 0.2695, |
| "step": 26740 |
| }, |
| { |
| "epoch": 15.0, |
| "eval_cer": 0.16596745121115875, |
| "eval_loss": 0.4149874150753021, |
| "eval_runtime": 316.3138, |
| "eval_samples_per_second": 16.828, |
| "eval_steps_per_second": 4.208, |
| "eval_wer": 0.22420343314464797, |
| "step": 26745 |
| }, |
| { |
| "epoch": 15.0, |
| "learning_rate": 2.2714002642007926e-05, |
| "loss": 0.2961, |
| "step": 26750 |
| }, |
| { |
| "epoch": 15.01, |
| "learning_rate": 2.2711171919230042e-05, |
| "loss": 0.2656, |
| "step": 26760 |
| }, |
| { |
| "epoch": 15.01, |
| "learning_rate": 2.270834119645216e-05, |
| "loss": 0.2473, |
| "step": 26770 |
| }, |
| { |
| "epoch": 15.02, |
| "learning_rate": 2.270551047367428e-05, |
| "loss": 0.2549, |
| "step": 26780 |
| }, |
| { |
| "epoch": 15.03, |
| "learning_rate": 2.2702679750896396e-05, |
| "loss": 0.2531, |
| "step": 26790 |
| }, |
| { |
| "epoch": 15.03, |
| "learning_rate": 2.2699849028118512e-05, |
| "loss": 0.2534, |
| "step": 26800 |
| }, |
| { |
| "epoch": 15.04, |
| "learning_rate": 2.269701830534063e-05, |
| "loss": 0.2545, |
| "step": 26810 |
| }, |
| { |
| "epoch": 15.04, |
| "learning_rate": 2.2694187582562747e-05, |
| "loss": 0.2502, |
| "step": 26820 |
| }, |
| { |
| "epoch": 15.05, |
| "learning_rate": 2.2691356859784866e-05, |
| "loss": 0.2441, |
| "step": 26830 |
| }, |
| { |
| "epoch": 15.05, |
| "learning_rate": 2.2688526137006982e-05, |
| "loss": 0.2613, |
| "step": 26840 |
| }, |
| { |
| "epoch": 15.06, |
| "learning_rate": 2.26856954142291e-05, |
| "loss": 0.252, |
| "step": 26850 |
| }, |
| { |
| "epoch": 15.06, |
| "learning_rate": 2.2682864691451217e-05, |
| "loss": 0.2525, |
| "step": 26860 |
| }, |
| { |
| "epoch": 15.07, |
| "learning_rate": 2.2680033968673336e-05, |
| "loss": 0.2419, |
| "step": 26870 |
| }, |
| { |
| "epoch": 15.08, |
| "learning_rate": 2.2677203245895452e-05, |
| "loss": 0.2408, |
| "step": 26880 |
| }, |
| { |
| "epoch": 15.08, |
| "learning_rate": 2.267437252311757e-05, |
| "loss": 0.2609, |
| "step": 26890 |
| }, |
| { |
| "epoch": 15.09, |
| "learning_rate": 2.2671541800339687e-05, |
| "loss": 0.2566, |
| "step": 26900 |
| }, |
| { |
| "epoch": 15.09, |
| "learning_rate": 2.2668711077561803e-05, |
| "loss": 0.2619, |
| "step": 26910 |
| }, |
| { |
| "epoch": 15.1, |
| "learning_rate": 2.2665880354783922e-05, |
| "loss": 0.2418, |
| "step": 26920 |
| }, |
| { |
| "epoch": 15.1, |
| "learning_rate": 2.266304963200604e-05, |
| "loss": 0.2533, |
| "step": 26930 |
| }, |
| { |
| "epoch": 15.11, |
| "learning_rate": 2.2660218909228157e-05, |
| "loss": 0.2583, |
| "step": 26940 |
| }, |
| { |
| "epoch": 15.11, |
| "learning_rate": 2.2657388186450273e-05, |
| "loss": 0.2561, |
| "step": 26950 |
| }, |
| { |
| "epoch": 15.12, |
| "learning_rate": 2.2654557463672392e-05, |
| "loss": 0.2693, |
| "step": 26960 |
| }, |
| { |
| "epoch": 15.13, |
| "learning_rate": 2.265172674089451e-05, |
| "loss": 0.2527, |
| "step": 26970 |
| }, |
| { |
| "epoch": 15.13, |
| "learning_rate": 2.2648896018116627e-05, |
| "loss": 0.2597, |
| "step": 26980 |
| }, |
| { |
| "epoch": 15.14, |
| "learning_rate": 2.2646065295338743e-05, |
| "loss": 0.2621, |
| "step": 26990 |
| }, |
| { |
| "epoch": 15.14, |
| "learning_rate": 2.2643234572560862e-05, |
| "loss": 0.2512, |
| "step": 27000 |
| }, |
| { |
| "epoch": 15.15, |
| "learning_rate": 2.2640403849782978e-05, |
| "loss": 0.2616, |
| "step": 27010 |
| }, |
| { |
| "epoch": 15.15, |
| "learning_rate": 2.2637573127005097e-05, |
| "loss": 0.2534, |
| "step": 27020 |
| }, |
| { |
| "epoch": 15.16, |
| "learning_rate": 2.2634742404227213e-05, |
| "loss": 0.2604, |
| "step": 27030 |
| }, |
| { |
| "epoch": 15.17, |
| "learning_rate": 2.263191168144933e-05, |
| "loss": 0.2603, |
| "step": 27040 |
| }, |
| { |
| "epoch": 15.17, |
| "learning_rate": 2.2629080958671448e-05, |
| "loss": 0.247, |
| "step": 27050 |
| }, |
| { |
| "epoch": 15.18, |
| "learning_rate": 2.2626250235893567e-05, |
| "loss": 0.2556, |
| "step": 27060 |
| }, |
| { |
| "epoch": 15.18, |
| "learning_rate": 2.2623419513115683e-05, |
| "loss": 0.2559, |
| "step": 27070 |
| }, |
| { |
| "epoch": 15.19, |
| "learning_rate": 2.26205887903378e-05, |
| "loss": 0.2515, |
| "step": 27080 |
| }, |
| { |
| "epoch": 15.19, |
| "learning_rate": 2.2617758067559918e-05, |
| "loss": 0.2544, |
| "step": 27090 |
| }, |
| { |
| "epoch": 15.2, |
| "learning_rate": 2.2614927344782037e-05, |
| "loss": 0.2443, |
| "step": 27100 |
| }, |
| { |
| "epoch": 15.2, |
| "learning_rate": 2.2612096622004153e-05, |
| "loss": 0.2468, |
| "step": 27110 |
| }, |
| { |
| "epoch": 15.21, |
| "learning_rate": 2.260926589922627e-05, |
| "loss": 0.262, |
| "step": 27120 |
| }, |
| { |
| "epoch": 15.22, |
| "learning_rate": 2.2606435176448384e-05, |
| "loss": 0.2475, |
| "step": 27130 |
| }, |
| { |
| "epoch": 15.22, |
| "learning_rate": 2.2603604453670507e-05, |
| "loss": 0.2485, |
| "step": 27140 |
| }, |
| { |
| "epoch": 15.23, |
| "learning_rate": 2.2600773730892623e-05, |
| "loss": 0.249, |
| "step": 27150 |
| }, |
| { |
| "epoch": 15.23, |
| "learning_rate": 2.259794300811474e-05, |
| "loss": 0.2658, |
| "step": 27160 |
| }, |
| { |
| "epoch": 15.24, |
| "learning_rate": 2.2595112285336854e-05, |
| "loss": 0.2552, |
| "step": 27170 |
| }, |
| { |
| "epoch": 15.24, |
| "learning_rate": 2.2592281562558973e-05, |
| "loss": 0.2508, |
| "step": 27180 |
| }, |
| { |
| "epoch": 15.25, |
| "learning_rate": 2.2589450839781093e-05, |
| "loss": 0.2515, |
| "step": 27190 |
| }, |
| { |
| "epoch": 15.26, |
| "learning_rate": 2.258662011700321e-05, |
| "loss": 0.2536, |
| "step": 27200 |
| }, |
| { |
| "epoch": 15.26, |
| "learning_rate": 2.2583789394225324e-05, |
| "loss": 0.2734, |
| "step": 27210 |
| }, |
| { |
| "epoch": 15.27, |
| "learning_rate": 2.2580958671447443e-05, |
| "loss": 0.2483, |
| "step": 27220 |
| }, |
| { |
| "epoch": 15.27, |
| "learning_rate": 2.2578127948669563e-05, |
| "loss": 0.272, |
| "step": 27230 |
| }, |
| { |
| "epoch": 15.28, |
| "learning_rate": 2.257529722589168e-05, |
| "loss": 0.2664, |
| "step": 27240 |
| }, |
| { |
| "epoch": 15.28, |
| "learning_rate": 2.2572466503113794e-05, |
| "loss": 0.2651, |
| "step": 27250 |
| }, |
| { |
| "epoch": 15.29, |
| "learning_rate": 2.2569635780335913e-05, |
| "loss": 0.2723, |
| "step": 27260 |
| }, |
| { |
| "epoch": 15.29, |
| "learning_rate": 2.256680505755803e-05, |
| "loss": 0.2565, |
| "step": 27270 |
| }, |
| { |
| "epoch": 15.3, |
| "learning_rate": 2.256397433478015e-05, |
| "loss": 0.2552, |
| "step": 27280 |
| }, |
| { |
| "epoch": 15.31, |
| "learning_rate": 2.2561143612002264e-05, |
| "loss": 0.2741, |
| "step": 27290 |
| }, |
| { |
| "epoch": 15.31, |
| "learning_rate": 2.2558312889224383e-05, |
| "loss": 0.2482, |
| "step": 27300 |
| }, |
| { |
| "epoch": 15.32, |
| "learning_rate": 2.25554821664465e-05, |
| "loss": 0.2618, |
| "step": 27310 |
| }, |
| { |
| "epoch": 15.32, |
| "learning_rate": 2.255265144366862e-05, |
| "loss": 0.2652, |
| "step": 27320 |
| }, |
| { |
| "epoch": 15.33, |
| "learning_rate": 2.2549820720890734e-05, |
| "loss": 0.2695, |
| "step": 27330 |
| }, |
| { |
| "epoch": 15.33, |
| "learning_rate": 2.2546989998112853e-05, |
| "loss": 0.2542, |
| "step": 27340 |
| }, |
| { |
| "epoch": 15.34, |
| "learning_rate": 2.254415927533497e-05, |
| "loss": 0.2432, |
| "step": 27350 |
| }, |
| { |
| "epoch": 15.34, |
| "learning_rate": 2.254132855255709e-05, |
| "loss": 0.2579, |
| "step": 27360 |
| }, |
| { |
| "epoch": 15.35, |
| "learning_rate": 2.2538497829779204e-05, |
| "loss": 0.2598, |
| "step": 27370 |
| }, |
| { |
| "epoch": 15.36, |
| "learning_rate": 2.2535667107001323e-05, |
| "loss": 0.2406, |
| "step": 27380 |
| }, |
| { |
| "epoch": 15.36, |
| "learning_rate": 2.253283638422344e-05, |
| "loss": 0.266, |
| "step": 27390 |
| }, |
| { |
| "epoch": 15.37, |
| "learning_rate": 2.2530005661445555e-05, |
| "loss": 0.2543, |
| "step": 27400 |
| }, |
| { |
| "epoch": 15.37, |
| "learning_rate": 2.2527174938667674e-05, |
| "loss": 0.2529, |
| "step": 27410 |
| }, |
| { |
| "epoch": 15.38, |
| "learning_rate": 2.2524344215889793e-05, |
| "loss": 0.2518, |
| "step": 27420 |
| }, |
| { |
| "epoch": 15.38, |
| "learning_rate": 2.252151349311191e-05, |
| "loss": 0.2537, |
| "step": 27430 |
| }, |
| { |
| "epoch": 15.39, |
| "learning_rate": 2.2518682770334025e-05, |
| "loss": 0.2599, |
| "step": 27440 |
| }, |
| { |
| "epoch": 15.4, |
| "learning_rate": 2.2515852047556144e-05, |
| "loss": 0.254, |
| "step": 27450 |
| }, |
| { |
| "epoch": 15.4, |
| "learning_rate": 2.251302132477826e-05, |
| "loss": 0.2526, |
| "step": 27460 |
| }, |
| { |
| "epoch": 15.41, |
| "learning_rate": 2.251019060200038e-05, |
| "loss": 0.2652, |
| "step": 27470 |
| }, |
| { |
| "epoch": 15.41, |
| "learning_rate": 2.2507359879222495e-05, |
| "loss": 0.2668, |
| "step": 27480 |
| }, |
| { |
| "epoch": 15.42, |
| "learning_rate": 2.250452915644461e-05, |
| "loss": 0.2523, |
| "step": 27490 |
| }, |
| { |
| "epoch": 15.42, |
| "learning_rate": 2.250169843366673e-05, |
| "loss": 0.2497, |
| "step": 27500 |
| }, |
| { |
| "epoch": 15.43, |
| "learning_rate": 2.249886771088885e-05, |
| "loss": 0.2484, |
| "step": 27510 |
| }, |
| { |
| "epoch": 15.43, |
| "learning_rate": 2.2496036988110965e-05, |
| "loss": 0.2657, |
| "step": 27520 |
| }, |
| { |
| "epoch": 15.44, |
| "learning_rate": 2.249320626533308e-05, |
| "loss": 0.2451, |
| "step": 27530 |
| }, |
| { |
| "epoch": 15.45, |
| "learning_rate": 2.24903755425552e-05, |
| "loss": 0.2587, |
| "step": 27540 |
| }, |
| { |
| "epoch": 15.45, |
| "learning_rate": 2.248754481977732e-05, |
| "loss": 0.2519, |
| "step": 27550 |
| }, |
| { |
| "epoch": 15.46, |
| "learning_rate": 2.2484714096999435e-05, |
| "loss": 0.256, |
| "step": 27560 |
| }, |
| { |
| "epoch": 15.46, |
| "learning_rate": 2.248188337422155e-05, |
| "loss": 0.2437, |
| "step": 27570 |
| }, |
| { |
| "epoch": 15.47, |
| "learning_rate": 2.2479052651443666e-05, |
| "loss": 0.2592, |
| "step": 27580 |
| }, |
| { |
| "epoch": 15.47, |
| "learning_rate": 2.247622192866579e-05, |
| "loss": 0.267, |
| "step": 27590 |
| }, |
| { |
| "epoch": 15.48, |
| "learning_rate": 2.2473391205887905e-05, |
| "loss": 0.2448, |
| "step": 27600 |
| }, |
| { |
| "epoch": 15.48, |
| "learning_rate": 2.247056048311002e-05, |
| "loss": 0.2681, |
| "step": 27610 |
| }, |
| { |
| "epoch": 15.49, |
| "learning_rate": 2.2467729760332136e-05, |
| "loss": 0.2555, |
| "step": 27620 |
| }, |
| { |
| "epoch": 15.5, |
| "learning_rate": 2.246489903755426e-05, |
| "loss": 0.2397, |
| "step": 27630 |
| }, |
| { |
| "epoch": 15.5, |
| "learning_rate": 2.2462068314776375e-05, |
| "loss": 0.2589, |
| "step": 27640 |
| }, |
| { |
| "epoch": 15.51, |
| "learning_rate": 2.245923759199849e-05, |
| "loss": 0.2571, |
| "step": 27650 |
| }, |
| { |
| "epoch": 15.51, |
| "learning_rate": 2.2456406869220606e-05, |
| "loss": 0.2676, |
| "step": 27660 |
| }, |
| { |
| "epoch": 15.52, |
| "learning_rate": 2.2453576146442726e-05, |
| "loss": 0.2528, |
| "step": 27670 |
| }, |
| { |
| "epoch": 15.52, |
| "learning_rate": 2.2450745423664845e-05, |
| "loss": 0.2515, |
| "step": 27680 |
| }, |
| { |
| "epoch": 15.53, |
| "learning_rate": 2.244791470088696e-05, |
| "loss": 0.2624, |
| "step": 27690 |
| }, |
| { |
| "epoch": 15.54, |
| "learning_rate": 2.2445083978109076e-05, |
| "loss": 0.259, |
| "step": 27700 |
| }, |
| { |
| "epoch": 15.54, |
| "learning_rate": 2.2442253255331196e-05, |
| "loss": 0.2587, |
| "step": 27710 |
| }, |
| { |
| "epoch": 15.55, |
| "learning_rate": 2.2439422532553315e-05, |
| "loss": 0.2442, |
| "step": 27720 |
| }, |
| { |
| "epoch": 15.55, |
| "learning_rate": 2.243659180977543e-05, |
| "loss": 0.2577, |
| "step": 27730 |
| }, |
| { |
| "epoch": 15.56, |
| "learning_rate": 2.2433761086997546e-05, |
| "loss": 0.2609, |
| "step": 27740 |
| }, |
| { |
| "epoch": 15.56, |
| "learning_rate": 2.2430930364219666e-05, |
| "loss": 0.2717, |
| "step": 27750 |
| }, |
| { |
| "epoch": 15.57, |
| "learning_rate": 2.242809964144178e-05, |
| "loss": 0.2612, |
| "step": 27760 |
| }, |
| { |
| "epoch": 15.57, |
| "learning_rate": 2.24252689186639e-05, |
| "loss": 0.2488, |
| "step": 27770 |
| }, |
| { |
| "epoch": 15.58, |
| "learning_rate": 2.2422438195886016e-05, |
| "loss": 0.2437, |
| "step": 27780 |
| }, |
| { |
| "epoch": 15.59, |
| "learning_rate": 2.2419607473108136e-05, |
| "loss": 0.2486, |
| "step": 27790 |
| }, |
| { |
| "epoch": 15.59, |
| "learning_rate": 2.241677675033025e-05, |
| "loss": 0.248, |
| "step": 27800 |
| }, |
| { |
| "epoch": 15.6, |
| "learning_rate": 2.241394602755237e-05, |
| "loss": 0.2566, |
| "step": 27810 |
| }, |
| { |
| "epoch": 15.6, |
| "learning_rate": 2.2411115304774486e-05, |
| "loss": 0.2643, |
| "step": 27820 |
| }, |
| { |
| "epoch": 15.61, |
| "learning_rate": 2.2408284581996606e-05, |
| "loss": 0.2648, |
| "step": 27830 |
| }, |
| { |
| "epoch": 15.61, |
| "learning_rate": 2.240545385921872e-05, |
| "loss": 0.2568, |
| "step": 27840 |
| }, |
| { |
| "epoch": 15.62, |
| "learning_rate": 2.2402623136440837e-05, |
| "loss": 0.2586, |
| "step": 27850 |
| }, |
| { |
| "epoch": 15.63, |
| "learning_rate": 2.2399792413662956e-05, |
| "loss": 0.2613, |
| "step": 27860 |
| }, |
| { |
| "epoch": 15.63, |
| "learning_rate": 2.2396961690885072e-05, |
| "loss": 0.2531, |
| "step": 27870 |
| }, |
| { |
| "epoch": 15.64, |
| "learning_rate": 2.239413096810719e-05, |
| "loss": 0.2569, |
| "step": 27880 |
| }, |
| { |
| "epoch": 15.64, |
| "learning_rate": 2.2391300245329307e-05, |
| "loss": 0.2623, |
| "step": 27890 |
| }, |
| { |
| "epoch": 15.65, |
| "learning_rate": 2.2388469522551426e-05, |
| "loss": 0.2521, |
| "step": 27900 |
| }, |
| { |
| "epoch": 15.65, |
| "learning_rate": 2.2385638799773542e-05, |
| "loss": 0.2624, |
| "step": 27910 |
| }, |
| { |
| "epoch": 15.66, |
| "learning_rate": 2.238280807699566e-05, |
| "loss": 0.2491, |
| "step": 27920 |
| }, |
| { |
| "epoch": 15.66, |
| "learning_rate": 2.2379977354217777e-05, |
| "loss": 0.2564, |
| "step": 27930 |
| }, |
| { |
| "epoch": 15.67, |
| "learning_rate": 2.2377146631439893e-05, |
| "loss": 0.2583, |
| "step": 27940 |
| }, |
| { |
| "epoch": 15.68, |
| "learning_rate": 2.2374315908662012e-05, |
| "loss": 0.2674, |
| "step": 27950 |
| }, |
| { |
| "epoch": 15.68, |
| "learning_rate": 2.237148518588413e-05, |
| "loss": 0.256, |
| "step": 27960 |
| }, |
| { |
| "epoch": 15.69, |
| "learning_rate": 2.2368654463106247e-05, |
| "loss": 0.2603, |
| "step": 27970 |
| }, |
| { |
| "epoch": 15.69, |
| "learning_rate": 2.2365823740328363e-05, |
| "loss": 0.2599, |
| "step": 27980 |
| }, |
| { |
| "epoch": 15.7, |
| "learning_rate": 2.2362993017550482e-05, |
| "loss": 0.2563, |
| "step": 27990 |
| }, |
| { |
| "epoch": 15.7, |
| "learning_rate": 2.23601622947726e-05, |
| "loss": 0.2461, |
| "step": 28000 |
| }, |
| { |
| "epoch": 15.71, |
| "learning_rate": 2.2357331571994717e-05, |
| "loss": 0.2569, |
| "step": 28010 |
| }, |
| { |
| "epoch": 15.71, |
| "learning_rate": 2.2354500849216833e-05, |
| "loss": 0.254, |
| "step": 28020 |
| }, |
| { |
| "epoch": 15.72, |
| "learning_rate": 2.235167012643895e-05, |
| "loss": 0.2571, |
| "step": 28030 |
| }, |
| { |
| "epoch": 15.73, |
| "learning_rate": 2.234883940366107e-05, |
| "loss": 0.2641, |
| "step": 28040 |
| }, |
| { |
| "epoch": 15.73, |
| "learning_rate": 2.2346008680883187e-05, |
| "loss": 0.2609, |
| "step": 28050 |
| }, |
| { |
| "epoch": 15.74, |
| "learning_rate": 2.2343177958105303e-05, |
| "loss": 0.2635, |
| "step": 28060 |
| }, |
| { |
| "epoch": 15.74, |
| "learning_rate": 2.234034723532742e-05, |
| "loss": 0.2634, |
| "step": 28070 |
| }, |
| { |
| "epoch": 15.75, |
| "learning_rate": 2.233751651254954e-05, |
| "loss": 0.2692, |
| "step": 28080 |
| }, |
| { |
| "epoch": 15.75, |
| "learning_rate": 2.2334685789771657e-05, |
| "loss": 0.2564, |
| "step": 28090 |
| }, |
| { |
| "epoch": 15.76, |
| "learning_rate": 2.2331855066993773e-05, |
| "loss": 0.2478, |
| "step": 28100 |
| }, |
| { |
| "epoch": 15.77, |
| "learning_rate": 2.232902434421589e-05, |
| "loss": 0.2508, |
| "step": 28110 |
| }, |
| { |
| "epoch": 15.77, |
| "learning_rate": 2.2326193621438008e-05, |
| "loss": 0.246, |
| "step": 28120 |
| }, |
| { |
| "epoch": 15.78, |
| "learning_rate": 2.2323362898660127e-05, |
| "loss": 0.2755, |
| "step": 28130 |
| }, |
| { |
| "epoch": 15.78, |
| "learning_rate": 2.2320532175882243e-05, |
| "loss": 0.2577, |
| "step": 28140 |
| }, |
| { |
| "epoch": 15.79, |
| "learning_rate": 2.231770145310436e-05, |
| "loss": 0.2494, |
| "step": 28150 |
| }, |
| { |
| "epoch": 15.79, |
| "learning_rate": 2.2314870730326478e-05, |
| "loss": 0.2639, |
| "step": 28160 |
| }, |
| { |
| "epoch": 15.8, |
| "learning_rate": 2.2312040007548597e-05, |
| "loss": 0.263, |
| "step": 28170 |
| }, |
| { |
| "epoch": 15.8, |
| "learning_rate": 2.2309209284770713e-05, |
| "loss": 0.2725, |
| "step": 28180 |
| }, |
| { |
| "epoch": 15.81, |
| "learning_rate": 2.230637856199283e-05, |
| "loss": 0.2596, |
| "step": 28190 |
| }, |
| { |
| "epoch": 15.82, |
| "learning_rate": 2.2303547839214948e-05, |
| "loss": 0.2609, |
| "step": 28200 |
| }, |
| { |
| "epoch": 15.82, |
| "learning_rate": 2.2300717116437064e-05, |
| "loss": 0.2544, |
| "step": 28210 |
| }, |
| { |
| "epoch": 15.83, |
| "learning_rate": 2.2297886393659183e-05, |
| "loss": 0.2648, |
| "step": 28220 |
| }, |
| { |
| "epoch": 15.83, |
| "learning_rate": 2.22950556708813e-05, |
| "loss": 0.2525, |
| "step": 28230 |
| }, |
| { |
| "epoch": 15.84, |
| "learning_rate": 2.2292224948103418e-05, |
| "loss": 0.2553, |
| "step": 28240 |
| }, |
| { |
| "epoch": 15.84, |
| "learning_rate": 2.2289394225325534e-05, |
| "loss": 0.2563, |
| "step": 28250 |
| }, |
| { |
| "epoch": 15.85, |
| "learning_rate": 2.2286563502547653e-05, |
| "loss": 0.2657, |
| "step": 28260 |
| }, |
| { |
| "epoch": 15.85, |
| "learning_rate": 2.228373277976977e-05, |
| "loss": 0.2557, |
| "step": 28270 |
| }, |
| { |
| "epoch": 15.86, |
| "learning_rate": 2.2280902056991884e-05, |
| "loss": 0.2397, |
| "step": 28280 |
| }, |
| { |
| "epoch": 15.87, |
| "learning_rate": 2.2278071334214003e-05, |
| "loss": 0.2526, |
| "step": 28290 |
| }, |
| { |
| "epoch": 15.87, |
| "learning_rate": 2.227524061143612e-05, |
| "loss": 0.2491, |
| "step": 28300 |
| }, |
| { |
| "epoch": 15.88, |
| "learning_rate": 2.227240988865824e-05, |
| "loss": 0.2546, |
| "step": 28310 |
| }, |
| { |
| "epoch": 15.88, |
| "learning_rate": 2.2269579165880354e-05, |
| "loss": 0.2665, |
| "step": 28320 |
| }, |
| { |
| "epoch": 15.89, |
| "learning_rate": 2.2266748443102473e-05, |
| "loss": 0.258, |
| "step": 28330 |
| }, |
| { |
| "epoch": 15.89, |
| "learning_rate": 2.226391772032459e-05, |
| "loss": 0.2728, |
| "step": 28340 |
| }, |
| { |
| "epoch": 15.9, |
| "learning_rate": 2.226108699754671e-05, |
| "loss": 0.2717, |
| "step": 28350 |
| }, |
| { |
| "epoch": 15.91, |
| "learning_rate": 2.2258256274768824e-05, |
| "loss": 0.2559, |
| "step": 28360 |
| }, |
| { |
| "epoch": 15.91, |
| "learning_rate": 2.2255425551990943e-05, |
| "loss": 0.2564, |
| "step": 28370 |
| }, |
| { |
| "epoch": 15.92, |
| "learning_rate": 2.225259482921306e-05, |
| "loss": 0.2482, |
| "step": 28380 |
| }, |
| { |
| "epoch": 15.92, |
| "learning_rate": 2.2249764106435175e-05, |
| "loss": 0.2637, |
| "step": 28390 |
| }, |
| { |
| "epoch": 15.93, |
| "learning_rate": 2.2246933383657294e-05, |
| "loss": 0.2532, |
| "step": 28400 |
| }, |
| { |
| "epoch": 15.93, |
| "learning_rate": 2.2244102660879413e-05, |
| "loss": 0.2533, |
| "step": 28410 |
| }, |
| { |
| "epoch": 15.94, |
| "learning_rate": 2.224127193810153e-05, |
| "loss": 0.266, |
| "step": 28420 |
| }, |
| { |
| "epoch": 15.94, |
| "learning_rate": 2.2238441215323645e-05, |
| "loss": 0.2463, |
| "step": 28430 |
| }, |
| { |
| "epoch": 15.95, |
| "learning_rate": 2.2235610492545764e-05, |
| "loss": 0.2555, |
| "step": 28440 |
| }, |
| { |
| "epoch": 15.96, |
| "learning_rate": 2.2232779769767883e-05, |
| "loss": 0.246, |
| "step": 28450 |
| }, |
| { |
| "epoch": 15.96, |
| "learning_rate": 2.222994904699e-05, |
| "loss": 0.2625, |
| "step": 28460 |
| }, |
| { |
| "epoch": 15.97, |
| "learning_rate": 2.2227118324212115e-05, |
| "loss": 0.2603, |
| "step": 28470 |
| }, |
| { |
| "epoch": 15.97, |
| "learning_rate": 2.222428760143423e-05, |
| "loss": 0.2711, |
| "step": 28480 |
| }, |
| { |
| "epoch": 15.98, |
| "learning_rate": 2.2221456878656353e-05, |
| "loss": 0.2646, |
| "step": 28490 |
| }, |
| { |
| "epoch": 15.98, |
| "learning_rate": 2.221862615587847e-05, |
| "loss": 0.2558, |
| "step": 28500 |
| }, |
| { |
| "epoch": 15.99, |
| "learning_rate": 2.2215795433100585e-05, |
| "loss": 0.2712, |
| "step": 28510 |
| }, |
| { |
| "epoch": 16.0, |
| "learning_rate": 2.22129647103227e-05, |
| "loss": 0.2508, |
| "step": 28520 |
| }, |
| { |
| "epoch": 16.0, |
| "eval_cer": 0.16283119144457295, |
| "eval_loss": 0.4134572148323059, |
| "eval_runtime": 316.3889, |
| "eval_samples_per_second": 16.824, |
| "eval_steps_per_second": 4.207, |
| "eval_wer": 0.22123512229493283, |
| "step": 28528 |
| }, |
| { |
| "epoch": 16.0, |
| "learning_rate": 2.2210133987544823e-05, |
| "loss": 0.271, |
| "step": 28530 |
| }, |
| { |
| "epoch": 16.01, |
| "learning_rate": 2.220730326476694e-05, |
| "loss": 0.2684, |
| "step": 28540 |
| }, |
| { |
| "epoch": 16.01, |
| "learning_rate": 2.2204472541989055e-05, |
| "loss": 0.2547, |
| "step": 28550 |
| }, |
| { |
| "epoch": 16.02, |
| "learning_rate": 2.220164181921117e-05, |
| "loss": 0.2369, |
| "step": 28560 |
| }, |
| { |
| "epoch": 16.02, |
| "learning_rate": 2.219881109643329e-05, |
| "loss": 0.2526, |
| "step": 28570 |
| }, |
| { |
| "epoch": 16.03, |
| "learning_rate": 2.2196263445933196e-05, |
| "loss": 0.2338, |
| "step": 28580 |
| }, |
| { |
| "epoch": 16.03, |
| "learning_rate": 2.219343272315531e-05, |
| "loss": 0.2428, |
| "step": 28590 |
| }, |
| { |
| "epoch": 16.04, |
| "learning_rate": 2.219060200037743e-05, |
| "loss": 0.2512, |
| "step": 28600 |
| }, |
| { |
| "epoch": 16.05, |
| "learning_rate": 2.218777127759955e-05, |
| "loss": 0.2361, |
| "step": 28610 |
| }, |
| { |
| "epoch": 16.05, |
| "learning_rate": 2.2184940554821665e-05, |
| "loss": 0.2644, |
| "step": 28620 |
| }, |
| { |
| "epoch": 16.06, |
| "learning_rate": 2.218210983204378e-05, |
| "loss": 0.2402, |
| "step": 28630 |
| }, |
| { |
| "epoch": 16.06, |
| "learning_rate": 2.2179279109265897e-05, |
| "loss": 0.2397, |
| "step": 28640 |
| }, |
| { |
| "epoch": 16.07, |
| "learning_rate": 2.217644838648802e-05, |
| "loss": 0.2458, |
| "step": 28650 |
| }, |
| { |
| "epoch": 16.07, |
| "learning_rate": 2.2173617663710135e-05, |
| "loss": 0.236, |
| "step": 28660 |
| }, |
| { |
| "epoch": 16.08, |
| "learning_rate": 2.217078694093225e-05, |
| "loss": 0.2492, |
| "step": 28670 |
| }, |
| { |
| "epoch": 16.09, |
| "learning_rate": 2.2167956218154367e-05, |
| "loss": 0.2272, |
| "step": 28680 |
| }, |
| { |
| "epoch": 16.09, |
| "learning_rate": 2.216512549537649e-05, |
| "loss": 0.2486, |
| "step": 28690 |
| }, |
| { |
| "epoch": 16.1, |
| "learning_rate": 2.2162294772598605e-05, |
| "loss": 0.2487, |
| "step": 28700 |
| }, |
| { |
| "epoch": 16.1, |
| "learning_rate": 2.215946404982072e-05, |
| "loss": 0.2508, |
| "step": 28710 |
| }, |
| { |
| "epoch": 16.11, |
| "learning_rate": 2.2156633327042837e-05, |
| "loss": 0.2345, |
| "step": 28720 |
| }, |
| { |
| "epoch": 16.11, |
| "learning_rate": 2.2153802604264956e-05, |
| "loss": 0.2381, |
| "step": 28730 |
| }, |
| { |
| "epoch": 16.12, |
| "learning_rate": 2.2150971881487075e-05, |
| "loss": 0.2467, |
| "step": 28740 |
| }, |
| { |
| "epoch": 16.12, |
| "learning_rate": 2.214814115870919e-05, |
| "loss": 0.2413, |
| "step": 28750 |
| }, |
| { |
| "epoch": 16.13, |
| "learning_rate": 2.2145310435931307e-05, |
| "loss": 0.2499, |
| "step": 28760 |
| }, |
| { |
| "epoch": 16.14, |
| "learning_rate": 2.2142479713153426e-05, |
| "loss": 0.255, |
| "step": 28770 |
| }, |
| { |
| "epoch": 16.14, |
| "learning_rate": 2.2139648990375545e-05, |
| "loss": 0.2472, |
| "step": 28780 |
| }, |
| { |
| "epoch": 16.15, |
| "learning_rate": 2.213681826759766e-05, |
| "loss": 0.248, |
| "step": 28790 |
| }, |
| { |
| "epoch": 16.15, |
| "learning_rate": 2.2133987544819777e-05, |
| "loss": 0.2588, |
| "step": 28800 |
| }, |
| { |
| "epoch": 16.16, |
| "learning_rate": 2.2131156822041893e-05, |
| "loss": 0.2561, |
| "step": 28810 |
| }, |
| { |
| "epoch": 16.16, |
| "learning_rate": 2.2128326099264012e-05, |
| "loss": 0.2511, |
| "step": 28820 |
| }, |
| { |
| "epoch": 16.17, |
| "learning_rate": 2.212549537648613e-05, |
| "loss": 0.2225, |
| "step": 28830 |
| }, |
| { |
| "epoch": 16.17, |
| "learning_rate": 2.2122664653708247e-05, |
| "loss": 0.2474, |
| "step": 28840 |
| }, |
| { |
| "epoch": 16.18, |
| "learning_rate": 2.2119833930930363e-05, |
| "loss": 0.2571, |
| "step": 28850 |
| }, |
| { |
| "epoch": 16.19, |
| "learning_rate": 2.2117003208152482e-05, |
| "loss": 0.2494, |
| "step": 28860 |
| }, |
| { |
| "epoch": 16.19, |
| "learning_rate": 2.21141724853746e-05, |
| "loss": 0.2551, |
| "step": 28870 |
| }, |
| { |
| "epoch": 16.2, |
| "learning_rate": 2.2111341762596717e-05, |
| "loss": 0.2456, |
| "step": 28880 |
| }, |
| { |
| "epoch": 16.2, |
| "learning_rate": 2.2108511039818833e-05, |
| "loss": 0.2422, |
| "step": 28890 |
| }, |
| { |
| "epoch": 16.21, |
| "learning_rate": 2.2105680317040952e-05, |
| "loss": 0.255, |
| "step": 28900 |
| }, |
| { |
| "epoch": 16.21, |
| "learning_rate": 2.2102849594263068e-05, |
| "loss": 0.2488, |
| "step": 28910 |
| }, |
| { |
| "epoch": 16.22, |
| "learning_rate": 2.2100018871485187e-05, |
| "loss": 0.2505, |
| "step": 28920 |
| }, |
| { |
| "epoch": 16.23, |
| "learning_rate": 2.2097188148707303e-05, |
| "loss": 0.2351, |
| "step": 28930 |
| }, |
| { |
| "epoch": 16.23, |
| "learning_rate": 2.2094357425929422e-05, |
| "loss": 0.2579, |
| "step": 28940 |
| }, |
| { |
| "epoch": 16.24, |
| "learning_rate": 2.2091526703151538e-05, |
| "loss": 0.2517, |
| "step": 28950 |
| }, |
| { |
| "epoch": 16.24, |
| "learning_rate": 2.2088695980373657e-05, |
| "loss": 0.244, |
| "step": 28960 |
| }, |
| { |
| "epoch": 16.25, |
| "learning_rate": 2.2085865257595773e-05, |
| "loss": 0.2462, |
| "step": 28970 |
| }, |
| { |
| "epoch": 16.25, |
| "learning_rate": 2.2083034534817892e-05, |
| "loss": 0.246, |
| "step": 28980 |
| }, |
| { |
| "epoch": 16.26, |
| "learning_rate": 2.2080203812040008e-05, |
| "loss": 0.2518, |
| "step": 28990 |
| }, |
| { |
| "epoch": 16.26, |
| "learning_rate": 2.2077373089262124e-05, |
| "loss": 0.2443, |
| "step": 29000 |
| }, |
| { |
| "epoch": 16.27, |
| "learning_rate": 2.2074542366484243e-05, |
| "loss": 0.2313, |
| "step": 29010 |
| }, |
| { |
| "epoch": 16.28, |
| "learning_rate": 2.2071711643706362e-05, |
| "loss": 0.2335, |
| "step": 29020 |
| }, |
| { |
| "epoch": 16.28, |
| "learning_rate": 2.2068880920928478e-05, |
| "loss": 0.2325, |
| "step": 29030 |
| }, |
| { |
| "epoch": 16.29, |
| "learning_rate": 2.2066050198150593e-05, |
| "loss": 0.2471, |
| "step": 29040 |
| }, |
| { |
| "epoch": 16.29, |
| "learning_rate": 2.2063219475372713e-05, |
| "loss": 0.2465, |
| "step": 29050 |
| }, |
| { |
| "epoch": 16.3, |
| "learning_rate": 2.2060388752594832e-05, |
| "loss": 0.2471, |
| "step": 29060 |
| }, |
| { |
| "epoch": 16.3, |
| "learning_rate": 2.2057558029816948e-05, |
| "loss": 0.25, |
| "step": 29070 |
| }, |
| { |
| "epoch": 16.31, |
| "learning_rate": 2.2054727307039063e-05, |
| "loss": 0.2473, |
| "step": 29080 |
| }, |
| { |
| "epoch": 16.32, |
| "learning_rate": 2.205189658426118e-05, |
| "loss": 0.257, |
| "step": 29090 |
| }, |
| { |
| "epoch": 16.32, |
| "learning_rate": 2.2049065861483302e-05, |
| "loss": 0.2415, |
| "step": 29100 |
| }, |
| { |
| "epoch": 16.33, |
| "learning_rate": 2.2046235138705418e-05, |
| "loss": 0.2439, |
| "step": 29110 |
| }, |
| { |
| "epoch": 16.33, |
| "learning_rate": 2.2043404415927533e-05, |
| "loss": 0.2458, |
| "step": 29120 |
| }, |
| { |
| "epoch": 16.34, |
| "learning_rate": 2.204057369314965e-05, |
| "loss": 0.2471, |
| "step": 29130 |
| }, |
| { |
| "epoch": 16.34, |
| "learning_rate": 2.2037742970371772e-05, |
| "loss": 0.2444, |
| "step": 29140 |
| }, |
| { |
| "epoch": 16.35, |
| "learning_rate": 2.2034912247593888e-05, |
| "loss": 0.2554, |
| "step": 29150 |
| }, |
| { |
| "epoch": 16.35, |
| "learning_rate": 2.2032081524816003e-05, |
| "loss": 0.2509, |
| "step": 29160 |
| }, |
| { |
| "epoch": 16.36, |
| "learning_rate": 2.202925080203812e-05, |
| "loss": 0.2658, |
| "step": 29170 |
| }, |
| { |
| "epoch": 16.37, |
| "learning_rate": 2.202642007926024e-05, |
| "loss": 0.2315, |
| "step": 29180 |
| }, |
| { |
| "epoch": 16.37, |
| "learning_rate": 2.2023589356482358e-05, |
| "loss": 0.2471, |
| "step": 29190 |
| }, |
| { |
| "epoch": 16.38, |
| "learning_rate": 2.2020758633704473e-05, |
| "loss": 0.2513, |
| "step": 29200 |
| }, |
| { |
| "epoch": 16.38, |
| "learning_rate": 2.201792791092659e-05, |
| "loss": 0.2574, |
| "step": 29210 |
| }, |
| { |
| "epoch": 16.39, |
| "learning_rate": 2.2015097188148705e-05, |
| "loss": 0.242, |
| "step": 29220 |
| }, |
| { |
| "epoch": 16.39, |
| "learning_rate": 2.2012266465370828e-05, |
| "loss": 0.2403, |
| "step": 29230 |
| }, |
| { |
| "epoch": 16.4, |
| "learning_rate": 2.2009435742592943e-05, |
| "loss": 0.2566, |
| "step": 29240 |
| }, |
| { |
| "epoch": 16.4, |
| "learning_rate": 2.200660501981506e-05, |
| "loss": 0.2455, |
| "step": 29250 |
| }, |
| { |
| "epoch": 16.41, |
| "learning_rate": 2.2003774297037175e-05, |
| "loss": 0.2349, |
| "step": 29260 |
| }, |
| { |
| "epoch": 16.42, |
| "learning_rate": 2.2000943574259294e-05, |
| "loss": 0.2473, |
| "step": 29270 |
| }, |
| { |
| "epoch": 16.42, |
| "learning_rate": 2.1998112851481413e-05, |
| "loss": 0.2292, |
| "step": 29280 |
| }, |
| { |
| "epoch": 16.43, |
| "learning_rate": 2.199528212870353e-05, |
| "loss": 0.2561, |
| "step": 29290 |
| }, |
| { |
| "epoch": 16.43, |
| "learning_rate": 2.1992451405925645e-05, |
| "loss": 0.2404, |
| "step": 29300 |
| }, |
| { |
| "epoch": 16.44, |
| "learning_rate": 2.1989620683147764e-05, |
| "loss": 0.2418, |
| "step": 29310 |
| }, |
| { |
| "epoch": 16.44, |
| "learning_rate": 2.1986789960369883e-05, |
| "loss": 0.2622, |
| "step": 29320 |
| }, |
| { |
| "epoch": 16.45, |
| "learning_rate": 2.1983959237592e-05, |
| "loss": 0.2396, |
| "step": 29330 |
| }, |
| { |
| "epoch": 16.46, |
| "learning_rate": 2.1981128514814115e-05, |
| "loss": 0.2607, |
| "step": 29340 |
| }, |
| { |
| "epoch": 16.46, |
| "learning_rate": 2.1978297792036234e-05, |
| "loss": 0.249, |
| "step": 29350 |
| }, |
| { |
| "epoch": 16.47, |
| "learning_rate": 2.197546706925835e-05, |
| "loss": 0.254, |
| "step": 29360 |
| }, |
| { |
| "epoch": 16.47, |
| "learning_rate": 2.197263634648047e-05, |
| "loss": 0.2516, |
| "step": 29370 |
| }, |
| { |
| "epoch": 16.48, |
| "learning_rate": 2.1969805623702585e-05, |
| "loss": 0.2439, |
| "step": 29380 |
| }, |
| { |
| "epoch": 16.48, |
| "learning_rate": 2.1966974900924704e-05, |
| "loss": 0.2585, |
| "step": 29390 |
| }, |
| { |
| "epoch": 16.49, |
| "learning_rate": 2.196414417814682e-05, |
| "loss": 0.2531, |
| "step": 29400 |
| }, |
| { |
| "epoch": 16.49, |
| "learning_rate": 2.196131345536894e-05, |
| "loss": 0.2578, |
| "step": 29410 |
| }, |
| { |
| "epoch": 16.5, |
| "learning_rate": 2.1958482732591055e-05, |
| "loss": 0.2576, |
| "step": 29420 |
| }, |
| { |
| "epoch": 16.51, |
| "learning_rate": 2.1955652009813174e-05, |
| "loss": 0.2485, |
| "step": 29430 |
| }, |
| { |
| "epoch": 16.51, |
| "learning_rate": 2.195282128703529e-05, |
| "loss": 0.2566, |
| "step": 29440 |
| }, |
| { |
| "epoch": 16.52, |
| "learning_rate": 2.1949990564257406e-05, |
| "loss": 0.248, |
| "step": 29450 |
| }, |
| { |
| "epoch": 16.52, |
| "learning_rate": 2.1947159841479525e-05, |
| "loss": 0.2565, |
| "step": 29460 |
| }, |
| { |
| "epoch": 16.53, |
| "learning_rate": 2.1944329118701644e-05, |
| "loss": 0.2468, |
| "step": 29470 |
| }, |
| { |
| "epoch": 16.53, |
| "learning_rate": 2.194149839592376e-05, |
| "loss": 0.2428, |
| "step": 29480 |
| }, |
| { |
| "epoch": 16.54, |
| "learning_rate": 2.1938667673145876e-05, |
| "loss": 0.2504, |
| "step": 29490 |
| }, |
| { |
| "epoch": 16.54, |
| "learning_rate": 2.1935836950367995e-05, |
| "loss": 0.2541, |
| "step": 29500 |
| }, |
| { |
| "epoch": 16.55, |
| "learning_rate": 2.1933006227590114e-05, |
| "loss": 0.241, |
| "step": 29510 |
| }, |
| { |
| "epoch": 16.56, |
| "learning_rate": 2.193017550481223e-05, |
| "loss": 0.2532, |
| "step": 29520 |
| }, |
| { |
| "epoch": 16.56, |
| "learning_rate": 2.1927344782034346e-05, |
| "loss": 0.2417, |
| "step": 29530 |
| }, |
| { |
| "epoch": 16.57, |
| "learning_rate": 2.192451405925646e-05, |
| "loss": 0.2689, |
| "step": 29540 |
| }, |
| { |
| "epoch": 16.57, |
| "learning_rate": 2.1921683336478584e-05, |
| "loss": 0.2467, |
| "step": 29550 |
| }, |
| { |
| "epoch": 16.58, |
| "learning_rate": 2.19188526137007e-05, |
| "loss": 0.2453, |
| "step": 29560 |
| }, |
| { |
| "epoch": 16.58, |
| "learning_rate": 2.1916021890922816e-05, |
| "loss": 0.2508, |
| "step": 29570 |
| }, |
| { |
| "epoch": 16.59, |
| "learning_rate": 2.191319116814493e-05, |
| "loss": 0.253, |
| "step": 29580 |
| }, |
| { |
| "epoch": 16.6, |
| "learning_rate": 2.1910360445367054e-05, |
| "loss": 0.26, |
| "step": 29590 |
| }, |
| { |
| "epoch": 16.6, |
| "learning_rate": 2.190752972258917e-05, |
| "loss": 0.2545, |
| "step": 29600 |
| }, |
| { |
| "epoch": 16.61, |
| "learning_rate": 2.1904698999811286e-05, |
| "loss": 0.2648, |
| "step": 29610 |
| }, |
| { |
| "epoch": 16.61, |
| "learning_rate": 2.19018682770334e-05, |
| "loss": 0.2432, |
| "step": 29620 |
| }, |
| { |
| "epoch": 16.62, |
| "learning_rate": 2.189903755425552e-05, |
| "loss": 0.2394, |
| "step": 29630 |
| }, |
| { |
| "epoch": 16.62, |
| "learning_rate": 2.189620683147764e-05, |
| "loss": 0.2522, |
| "step": 29640 |
| }, |
| { |
| "epoch": 16.63, |
| "learning_rate": 2.1893376108699756e-05, |
| "loss": 0.2495, |
| "step": 29650 |
| }, |
| { |
| "epoch": 16.63, |
| "learning_rate": 2.189054538592187e-05, |
| "loss": 0.2418, |
| "step": 29660 |
| }, |
| { |
| "epoch": 16.64, |
| "learning_rate": 2.1887714663143987e-05, |
| "loss": 0.2545, |
| "step": 29670 |
| }, |
| { |
| "epoch": 16.65, |
| "learning_rate": 2.188488394036611e-05, |
| "loss": 0.2434, |
| "step": 29680 |
| }, |
| { |
| "epoch": 16.65, |
| "learning_rate": 2.1882053217588226e-05, |
| "loss": 0.2549, |
| "step": 29690 |
| }, |
| { |
| "epoch": 16.66, |
| "learning_rate": 2.187922249481034e-05, |
| "loss": 0.2546, |
| "step": 29700 |
| }, |
| { |
| "epoch": 16.66, |
| "learning_rate": 2.1876391772032457e-05, |
| "loss": 0.2537, |
| "step": 29710 |
| }, |
| { |
| "epoch": 16.67, |
| "learning_rate": 2.1873561049254576e-05, |
| "loss": 0.2454, |
| "step": 29720 |
| }, |
| { |
| "epoch": 16.67, |
| "learning_rate": 2.1870730326476696e-05, |
| "loss": 0.237, |
| "step": 29730 |
| }, |
| { |
| "epoch": 16.68, |
| "learning_rate": 2.186789960369881e-05, |
| "loss": 0.2562, |
| "step": 29740 |
| }, |
| { |
| "epoch": 16.69, |
| "learning_rate": 2.1865068880920927e-05, |
| "loss": 0.2523, |
| "step": 29750 |
| }, |
| { |
| "epoch": 16.69, |
| "learning_rate": 2.1862238158143046e-05, |
| "loss": 0.2566, |
| "step": 29760 |
| }, |
| { |
| "epoch": 16.7, |
| "learning_rate": 2.1859407435365165e-05, |
| "loss": 0.2523, |
| "step": 29770 |
| }, |
| { |
| "epoch": 16.7, |
| "learning_rate": 2.185657671258728e-05, |
| "loss": 0.2403, |
| "step": 29780 |
| }, |
| { |
| "epoch": 16.71, |
| "learning_rate": 2.1853745989809397e-05, |
| "loss": 0.2654, |
| "step": 29790 |
| }, |
| { |
| "epoch": 16.71, |
| "learning_rate": 2.1850915267031516e-05, |
| "loss": 0.2583, |
| "step": 29800 |
| }, |
| { |
| "epoch": 16.72, |
| "learning_rate": 2.1848084544253632e-05, |
| "loss": 0.2525, |
| "step": 29810 |
| }, |
| { |
| "epoch": 16.72, |
| "learning_rate": 2.184525382147575e-05, |
| "loss": 0.2451, |
| "step": 29820 |
| }, |
| { |
| "epoch": 16.73, |
| "learning_rate": 2.1842423098697867e-05, |
| "loss": 0.2369, |
| "step": 29830 |
| }, |
| { |
| "epoch": 16.74, |
| "learning_rate": 2.1839592375919986e-05, |
| "loss": 0.2599, |
| "step": 29840 |
| }, |
| { |
| "epoch": 16.74, |
| "learning_rate": 2.1836761653142102e-05, |
| "loss": 0.2522, |
| "step": 29850 |
| }, |
| { |
| "epoch": 16.75, |
| "learning_rate": 2.183393093036422e-05, |
| "loss": 0.2525, |
| "step": 29860 |
| }, |
| { |
| "epoch": 16.75, |
| "learning_rate": 2.1831100207586337e-05, |
| "loss": 0.2467, |
| "step": 29870 |
| }, |
| { |
| "epoch": 16.76, |
| "learning_rate": 2.1828269484808456e-05, |
| "loss": 0.2472, |
| "step": 29880 |
| }, |
| { |
| "epoch": 16.76, |
| "learning_rate": 2.1825438762030572e-05, |
| "loss": 0.2566, |
| "step": 29890 |
| }, |
| { |
| "epoch": 16.77, |
| "learning_rate": 2.1822608039252688e-05, |
| "loss": 0.2479, |
| "step": 29900 |
| }, |
| { |
| "epoch": 16.77, |
| "learning_rate": 2.1819777316474807e-05, |
| "loss": 0.2452, |
| "step": 29910 |
| }, |
| { |
| "epoch": 16.78, |
| "learning_rate": 2.1816946593696926e-05, |
| "loss": 0.2461, |
| "step": 29920 |
| }, |
| { |
| "epoch": 16.79, |
| "learning_rate": 2.1814115870919042e-05, |
| "loss": 0.2485, |
| "step": 29930 |
| }, |
| { |
| "epoch": 16.79, |
| "learning_rate": 2.1811285148141158e-05, |
| "loss": 0.2541, |
| "step": 29940 |
| }, |
| { |
| "epoch": 16.8, |
| "learning_rate": 2.1808454425363277e-05, |
| "loss": 0.2514, |
| "step": 29950 |
| }, |
| { |
| "epoch": 16.8, |
| "learning_rate": 2.1805623702585396e-05, |
| "loss": 0.2557, |
| "step": 29960 |
| }, |
| { |
| "epoch": 16.81, |
| "learning_rate": 2.1802792979807512e-05, |
| "loss": 0.2501, |
| "step": 29970 |
| }, |
| { |
| "epoch": 16.81, |
| "learning_rate": 2.1799962257029628e-05, |
| "loss": 0.2375, |
| "step": 29980 |
| }, |
| { |
| "epoch": 16.82, |
| "learning_rate": 2.1797131534251744e-05, |
| "loss": 0.2568, |
| "step": 29990 |
| }, |
| { |
| "epoch": 16.83, |
| "learning_rate": 2.1794300811473866e-05, |
| "loss": 0.2407, |
| "step": 30000 |
| }, |
| { |
| "epoch": 16.83, |
| "learning_rate": 2.1791470088695982e-05, |
| "loss": 0.2561, |
| "step": 30010 |
| }, |
| { |
| "epoch": 16.84, |
| "learning_rate": 2.1788639365918098e-05, |
| "loss": 0.2422, |
| "step": 30020 |
| }, |
| { |
| "epoch": 16.84, |
| "learning_rate": 2.1785808643140214e-05, |
| "loss": 0.257, |
| "step": 30030 |
| }, |
| { |
| "epoch": 16.85, |
| "learning_rate": 2.1782977920362336e-05, |
| "loss": 0.2519, |
| "step": 30040 |
| }, |
| { |
| "epoch": 16.85, |
| "learning_rate": 2.1780147197584452e-05, |
| "loss": 0.2482, |
| "step": 30050 |
| }, |
| { |
| "epoch": 16.86, |
| "learning_rate": 2.1777316474806568e-05, |
| "loss": 0.2528, |
| "step": 30060 |
| }, |
| { |
| "epoch": 16.86, |
| "learning_rate": 2.1774485752028684e-05, |
| "loss": 0.2417, |
| "step": 30070 |
| }, |
| { |
| "epoch": 16.87, |
| "learning_rate": 2.17716550292508e-05, |
| "loss": 0.2617, |
| "step": 30080 |
| }, |
| { |
| "epoch": 16.88, |
| "learning_rate": 2.1768824306472922e-05, |
| "loss": 0.2496, |
| "step": 30090 |
| }, |
| { |
| "epoch": 16.88, |
| "learning_rate": 2.1765993583695038e-05, |
| "loss": 0.255, |
| "step": 30100 |
| }, |
| { |
| "epoch": 16.89, |
| "learning_rate": 2.1763162860917154e-05, |
| "loss": 0.2468, |
| "step": 30110 |
| }, |
| { |
| "epoch": 16.89, |
| "learning_rate": 2.176033213813927e-05, |
| "loss": 0.2586, |
| "step": 30120 |
| }, |
| { |
| "epoch": 16.9, |
| "learning_rate": 2.1757501415361392e-05, |
| "loss": 0.2287, |
| "step": 30130 |
| }, |
| { |
| "epoch": 16.9, |
| "learning_rate": 2.1754670692583508e-05, |
| "loss": 0.249, |
| "step": 30140 |
| }, |
| { |
| "epoch": 16.91, |
| "learning_rate": 2.1751839969805624e-05, |
| "loss": 0.2548, |
| "step": 30150 |
| }, |
| { |
| "epoch": 16.91, |
| "learning_rate": 2.174900924702774e-05, |
| "loss": 0.242, |
| "step": 30160 |
| }, |
| { |
| "epoch": 16.92, |
| "learning_rate": 2.174617852424986e-05, |
| "loss": 0.2464, |
| "step": 30170 |
| }, |
| { |
| "epoch": 16.93, |
| "learning_rate": 2.1743347801471978e-05, |
| "loss": 0.2363, |
| "step": 30180 |
| }, |
| { |
| "epoch": 16.93, |
| "learning_rate": 2.1740517078694093e-05, |
| "loss": 0.2503, |
| "step": 30190 |
| }, |
| { |
| "epoch": 16.94, |
| "learning_rate": 2.173768635591621e-05, |
| "loss": 0.2565, |
| "step": 30200 |
| }, |
| { |
| "epoch": 16.94, |
| "learning_rate": 2.173485563313833e-05, |
| "loss": 0.2404, |
| "step": 30210 |
| }, |
| { |
| "epoch": 16.95, |
| "learning_rate": 2.1732024910360448e-05, |
| "loss": 0.2409, |
| "step": 30220 |
| }, |
| { |
| "epoch": 16.95, |
| "learning_rate": 2.1729194187582563e-05, |
| "loss": 0.2395, |
| "step": 30230 |
| }, |
| { |
| "epoch": 16.96, |
| "learning_rate": 2.172636346480468e-05, |
| "loss": 0.2527, |
| "step": 30240 |
| }, |
| { |
| "epoch": 16.97, |
| "learning_rate": 2.17235327420268e-05, |
| "loss": 0.2392, |
| "step": 30250 |
| }, |
| { |
| "epoch": 16.97, |
| "learning_rate": 2.1720702019248914e-05, |
| "loss": 0.2429, |
| "step": 30260 |
| }, |
| { |
| "epoch": 16.98, |
| "learning_rate": 2.1717871296471033e-05, |
| "loss": 0.2516, |
| "step": 30270 |
| }, |
| { |
| "epoch": 16.98, |
| "learning_rate": 2.171504057369315e-05, |
| "loss": 0.251, |
| "step": 30280 |
| }, |
| { |
| "epoch": 16.99, |
| "learning_rate": 2.171220985091527e-05, |
| "loss": 0.242, |
| "step": 30290 |
| }, |
| { |
| "epoch": 16.99, |
| "learning_rate": 2.1709379128137384e-05, |
| "loss": 0.2487, |
| "step": 30300 |
| }, |
| { |
| "epoch": 17.0, |
| "learning_rate": 2.1706548405359503e-05, |
| "loss": 0.2543, |
| "step": 30310 |
| }, |
| { |
| "epoch": 17.0, |
| "eval_cer": 0.1620923744057442, |
| "eval_loss": 0.41483616828918457, |
| "eval_runtime": 314.3278, |
| "eval_samples_per_second": 16.935, |
| "eval_steps_per_second": 4.234, |
| "eval_wer": 0.2213534340648506, |
| "step": 30311 |
| }, |
| { |
| "epoch": 17.01, |
| "learning_rate": 2.170371768258162e-05, |
| "loss": 0.2613, |
| "step": 30320 |
| }, |
| { |
| "epoch": 17.01, |
| "learning_rate": 2.170088695980374e-05, |
| "loss": 0.2449, |
| "step": 30330 |
| }, |
| { |
| "epoch": 17.02, |
| "learning_rate": 2.1698056237025854e-05, |
| "loss": 0.2412, |
| "step": 30340 |
| }, |
| { |
| "epoch": 17.02, |
| "learning_rate": 2.169550858652576e-05, |
| "loss": 0.2447, |
| "step": 30350 |
| }, |
| { |
| "epoch": 17.03, |
| "learning_rate": 2.1692677863747876e-05, |
| "loss": 0.2444, |
| "step": 30360 |
| }, |
| { |
| "epoch": 17.03, |
| "learning_rate": 2.1689847140969995e-05, |
| "loss": 0.2356, |
| "step": 30370 |
| }, |
| { |
| "epoch": 17.04, |
| "learning_rate": 2.1687016418192114e-05, |
| "loss": 0.2623, |
| "step": 30380 |
| }, |
| { |
| "epoch": 17.04, |
| "learning_rate": 2.168418569541423e-05, |
| "loss": 0.2369, |
| "step": 30390 |
| }, |
| { |
| "epoch": 17.05, |
| "learning_rate": 2.1681354972636346e-05, |
| "loss": 0.2284, |
| "step": 30400 |
| }, |
| { |
| "epoch": 17.06, |
| "learning_rate": 2.1678524249858465e-05, |
| "loss": 0.236, |
| "step": 30410 |
| }, |
| { |
| "epoch": 17.06, |
| "learning_rate": 2.1675693527080584e-05, |
| "loss": 0.2372, |
| "step": 30420 |
| }, |
| { |
| "epoch": 17.07, |
| "learning_rate": 2.16728628043027e-05, |
| "loss": 0.2423, |
| "step": 30430 |
| }, |
| { |
| "epoch": 17.07, |
| "learning_rate": 2.1670032081524816e-05, |
| "loss": 0.2304, |
| "step": 30440 |
| }, |
| { |
| "epoch": 17.08, |
| "learning_rate": 2.1667201358746935e-05, |
| "loss": 0.2285, |
| "step": 30450 |
| }, |
| { |
| "epoch": 17.08, |
| "learning_rate": 2.166437063596905e-05, |
| "loss": 0.217, |
| "step": 30460 |
| }, |
| { |
| "epoch": 17.09, |
| "learning_rate": 2.166153991319117e-05, |
| "loss": 0.2454, |
| "step": 30470 |
| }, |
| { |
| "epoch": 17.09, |
| "learning_rate": 2.1658709190413286e-05, |
| "loss": 0.25, |
| "step": 30480 |
| }, |
| { |
| "epoch": 17.1, |
| "learning_rate": 2.1655878467635405e-05, |
| "loss": 0.2422, |
| "step": 30490 |
| }, |
| { |
| "epoch": 17.11, |
| "learning_rate": 2.165304774485752e-05, |
| "loss": 0.2406, |
| "step": 30500 |
| }, |
| { |
| "epoch": 17.11, |
| "learning_rate": 2.165021702207964e-05, |
| "loss": 0.2208, |
| "step": 30510 |
| }, |
| { |
| "epoch": 17.12, |
| "learning_rate": 2.1647386299301755e-05, |
| "loss": 0.2385, |
| "step": 30520 |
| }, |
| { |
| "epoch": 17.12, |
| "learning_rate": 2.1644555576523875e-05, |
| "loss": 0.2398, |
| "step": 30530 |
| }, |
| { |
| "epoch": 17.13, |
| "learning_rate": 2.164172485374599e-05, |
| "loss": 0.2257, |
| "step": 30540 |
| }, |
| { |
| "epoch": 17.13, |
| "learning_rate": 2.1638894130968106e-05, |
| "loss": 0.2444, |
| "step": 30550 |
| }, |
| { |
| "epoch": 17.14, |
| "learning_rate": 2.1636063408190225e-05, |
| "loss": 0.2318, |
| "step": 30560 |
| }, |
| { |
| "epoch": 17.15, |
| "learning_rate": 2.1633232685412345e-05, |
| "loss": 0.2401, |
| "step": 30570 |
| }, |
| { |
| "epoch": 17.15, |
| "learning_rate": 2.163040196263446e-05, |
| "loss": 0.2355, |
| "step": 30580 |
| }, |
| { |
| "epoch": 17.16, |
| "learning_rate": 2.1627571239856576e-05, |
| "loss": 0.2486, |
| "step": 30590 |
| }, |
| { |
| "epoch": 17.16, |
| "learning_rate": 2.1624740517078695e-05, |
| "loss": 0.2337, |
| "step": 30600 |
| }, |
| { |
| "epoch": 17.17, |
| "learning_rate": 2.162190979430081e-05, |
| "loss": 0.228, |
| "step": 30610 |
| }, |
| { |
| "epoch": 17.17, |
| "learning_rate": 2.161907907152293e-05, |
| "loss": 0.2409, |
| "step": 30620 |
| }, |
| { |
| "epoch": 17.18, |
| "learning_rate": 2.1616248348745046e-05, |
| "loss": 0.2312, |
| "step": 30630 |
| }, |
| { |
| "epoch": 17.18, |
| "learning_rate": 2.1613417625967162e-05, |
| "loss": 0.24, |
| "step": 30640 |
| }, |
| { |
| "epoch": 17.19, |
| "learning_rate": 2.161058690318928e-05, |
| "loss": 0.2397, |
| "step": 30650 |
| }, |
| { |
| "epoch": 17.2, |
| "learning_rate": 2.16077561804114e-05, |
| "loss": 0.2299, |
| "step": 30660 |
| }, |
| { |
| "epoch": 17.2, |
| "learning_rate": 2.1604925457633516e-05, |
| "loss": 0.2371, |
| "step": 30670 |
| }, |
| { |
| "epoch": 17.21, |
| "learning_rate": 2.1602094734855632e-05, |
| "loss": 0.2432, |
| "step": 30680 |
| }, |
| { |
| "epoch": 17.21, |
| "learning_rate": 2.159926401207775e-05, |
| "loss": 0.2374, |
| "step": 30690 |
| }, |
| { |
| "epoch": 17.22, |
| "learning_rate": 2.159643328929987e-05, |
| "loss": 0.2288, |
| "step": 30700 |
| }, |
| { |
| "epoch": 17.22, |
| "learning_rate": 2.1593602566521986e-05, |
| "loss": 0.2425, |
| "step": 30710 |
| }, |
| { |
| "epoch": 17.23, |
| "learning_rate": 2.1590771843744102e-05, |
| "loss": 0.238, |
| "step": 30720 |
| }, |
| { |
| "epoch": 17.23, |
| "learning_rate": 2.1587941120966218e-05, |
| "loss": 0.257, |
| "step": 30730 |
| }, |
| { |
| "epoch": 17.24, |
| "learning_rate": 2.158511039818834e-05, |
| "loss": 0.2414, |
| "step": 30740 |
| }, |
| { |
| "epoch": 17.25, |
| "learning_rate": 2.1582279675410456e-05, |
| "loss": 0.239, |
| "step": 30750 |
| }, |
| { |
| "epoch": 17.25, |
| "learning_rate": 2.1579448952632572e-05, |
| "loss": 0.2356, |
| "step": 30760 |
| }, |
| { |
| "epoch": 17.26, |
| "learning_rate": 2.1576618229854688e-05, |
| "loss": 0.2451, |
| "step": 30770 |
| }, |
| { |
| "epoch": 17.26, |
| "learning_rate": 2.157378750707681e-05, |
| "loss": 0.2489, |
| "step": 30780 |
| }, |
| { |
| "epoch": 17.27, |
| "learning_rate": 2.1570956784298926e-05, |
| "loss": 0.2415, |
| "step": 30790 |
| }, |
| { |
| "epoch": 17.27, |
| "learning_rate": 2.1568126061521042e-05, |
| "loss": 0.2491, |
| "step": 30800 |
| }, |
| { |
| "epoch": 17.28, |
| "learning_rate": 2.1565295338743158e-05, |
| "loss": 0.208, |
| "step": 30810 |
| }, |
| { |
| "epoch": 17.29, |
| "learning_rate": 2.1562464615965277e-05, |
| "loss": 0.2299, |
| "step": 30820 |
| }, |
| { |
| "epoch": 17.29, |
| "learning_rate": 2.1559633893187396e-05, |
| "loss": 0.2313, |
| "step": 30830 |
| }, |
| { |
| "epoch": 17.3, |
| "learning_rate": 2.1556803170409512e-05, |
| "loss": 0.2547, |
| "step": 30840 |
| }, |
| { |
| "epoch": 17.3, |
| "learning_rate": 2.1553972447631628e-05, |
| "loss": 0.2368, |
| "step": 30850 |
| }, |
| { |
| "epoch": 17.31, |
| "learning_rate": 2.1551141724853747e-05, |
| "loss": 0.2388, |
| "step": 30860 |
| }, |
| { |
| "epoch": 17.31, |
| "learning_rate": 2.1548311002075866e-05, |
| "loss": 0.2342, |
| "step": 30870 |
| }, |
| { |
| "epoch": 17.32, |
| "learning_rate": 2.1545480279297982e-05, |
| "loss": 0.2351, |
| "step": 30880 |
| }, |
| { |
| "epoch": 17.32, |
| "learning_rate": 2.1542649556520098e-05, |
| "loss": 0.2447, |
| "step": 30890 |
| }, |
| { |
| "epoch": 17.33, |
| "learning_rate": 2.1539818833742217e-05, |
| "loss": 0.2255, |
| "step": 30900 |
| }, |
| { |
| "epoch": 17.34, |
| "learning_rate": 2.1536988110964333e-05, |
| "loss": 0.2391, |
| "step": 30910 |
| }, |
| { |
| "epoch": 17.34, |
| "learning_rate": 2.1534157388186452e-05, |
| "loss": 0.2464, |
| "step": 30920 |
| }, |
| { |
| "epoch": 17.35, |
| "learning_rate": 2.1531326665408568e-05, |
| "loss": 0.2429, |
| "step": 30930 |
| }, |
| { |
| "epoch": 17.35, |
| "learning_rate": 2.1528495942630687e-05, |
| "loss": 0.2383, |
| "step": 30940 |
| }, |
| { |
| "epoch": 17.36, |
| "learning_rate": 2.1525665219852803e-05, |
| "loss": 0.2421, |
| "step": 30950 |
| }, |
| { |
| "epoch": 17.36, |
| "learning_rate": 2.1522834497074922e-05, |
| "loss": 0.2523, |
| "step": 30960 |
| }, |
| { |
| "epoch": 17.37, |
| "learning_rate": 2.1520003774297038e-05, |
| "loss": 0.2452, |
| "step": 30970 |
| }, |
| { |
| "epoch": 17.38, |
| "learning_rate": 2.1517173051519157e-05, |
| "loss": 0.2455, |
| "step": 30980 |
| }, |
| { |
| "epoch": 17.38, |
| "learning_rate": 2.1514342328741273e-05, |
| "loss": 0.2466, |
| "step": 30990 |
| }, |
| { |
| "epoch": 17.39, |
| "learning_rate": 2.151151160596339e-05, |
| "loss": 0.2405, |
| "step": 31000 |
| }, |
| { |
| "epoch": 17.39, |
| "learning_rate": 2.1508680883185508e-05, |
| "loss": 0.2341, |
| "step": 31010 |
| }, |
| { |
| "epoch": 17.4, |
| "learning_rate": 2.1505850160407623e-05, |
| "loss": 0.2357, |
| "step": 31020 |
| }, |
| { |
| "epoch": 17.4, |
| "learning_rate": 2.1503019437629743e-05, |
| "loss": 0.2452, |
| "step": 31030 |
| }, |
| { |
| "epoch": 17.41, |
| "learning_rate": 2.150018871485186e-05, |
| "loss": 0.2456, |
| "step": 31040 |
| }, |
| { |
| "epoch": 17.41, |
| "learning_rate": 2.1497357992073978e-05, |
| "loss": 0.245, |
| "step": 31050 |
| }, |
| { |
| "epoch": 17.42, |
| "learning_rate": 2.1494527269296093e-05, |
| "loss": 0.2342, |
| "step": 31060 |
| }, |
| { |
| "epoch": 17.43, |
| "learning_rate": 2.1491696546518213e-05, |
| "loss": 0.2457, |
| "step": 31070 |
| }, |
| { |
| "epoch": 17.43, |
| "learning_rate": 2.148886582374033e-05, |
| "loss": 0.2516, |
| "step": 31080 |
| }, |
| { |
| "epoch": 17.44, |
| "learning_rate": 2.1486035100962444e-05, |
| "loss": 0.2395, |
| "step": 31090 |
| }, |
| { |
| "epoch": 17.44, |
| "learning_rate": 2.1483204378184563e-05, |
| "loss": 0.237, |
| "step": 31100 |
| }, |
| { |
| "epoch": 17.45, |
| "learning_rate": 2.1480373655406683e-05, |
| "loss": 0.2571, |
| "step": 31110 |
| }, |
| { |
| "epoch": 17.45, |
| "learning_rate": 2.14775429326288e-05, |
| "loss": 0.2453, |
| "step": 31120 |
| }, |
| { |
| "epoch": 17.46, |
| "learning_rate": 2.1474712209850914e-05, |
| "loss": 0.2483, |
| "step": 31130 |
| }, |
| { |
| "epoch": 17.46, |
| "learning_rate": 2.1471881487073033e-05, |
| "loss": 0.2546, |
| "step": 31140 |
| }, |
| { |
| "epoch": 17.47, |
| "learning_rate": 2.1469050764295153e-05, |
| "loss": 0.2437, |
| "step": 31150 |
| }, |
| { |
| "epoch": 17.48, |
| "learning_rate": 2.146622004151727e-05, |
| "loss": 0.2406, |
| "step": 31160 |
| }, |
| { |
| "epoch": 17.48, |
| "learning_rate": 2.1463389318739384e-05, |
| "loss": 0.2624, |
| "step": 31170 |
| }, |
| { |
| "epoch": 17.49, |
| "learning_rate": 2.14605585959615e-05, |
| "loss": 0.2472, |
| "step": 31180 |
| }, |
| { |
| "epoch": 17.49, |
| "learning_rate": 2.1457727873183623e-05, |
| "loss": 0.2345, |
| "step": 31190 |
| }, |
| { |
| "epoch": 17.5, |
| "learning_rate": 2.145489715040574e-05, |
| "loss": 0.2419, |
| "step": 31200 |
| }, |
| { |
| "epoch": 17.5, |
| "learning_rate": 2.1452066427627854e-05, |
| "loss": 0.2267, |
| "step": 31210 |
| }, |
| { |
| "epoch": 17.51, |
| "learning_rate": 2.144923570484997e-05, |
| "loss": 0.2465, |
| "step": 31220 |
| }, |
| { |
| "epoch": 17.52, |
| "learning_rate": 2.1446404982072093e-05, |
| "loss": 0.2422, |
| "step": 31230 |
| }, |
| { |
| "epoch": 17.52, |
| "learning_rate": 2.144357425929421e-05, |
| "loss": 0.2356, |
| "step": 31240 |
| }, |
| { |
| "epoch": 17.53, |
| "learning_rate": 2.1440743536516324e-05, |
| "loss": 0.2501, |
| "step": 31250 |
| }, |
| { |
| "epoch": 17.53, |
| "learning_rate": 2.143791281373844e-05, |
| "loss": 0.2248, |
| "step": 31260 |
| }, |
| { |
| "epoch": 17.54, |
| "learning_rate": 2.143508209096056e-05, |
| "loss": 0.2526, |
| "step": 31270 |
| }, |
| { |
| "epoch": 17.54, |
| "learning_rate": 2.1432251368182678e-05, |
| "loss": 0.2413, |
| "step": 31280 |
| }, |
| { |
| "epoch": 17.55, |
| "learning_rate": 2.1429420645404794e-05, |
| "loss": 0.2368, |
| "step": 31290 |
| }, |
| { |
| "epoch": 17.55, |
| "learning_rate": 2.142658992262691e-05, |
| "loss": 0.2302, |
| "step": 31300 |
| }, |
| { |
| "epoch": 17.56, |
| "learning_rate": 2.142375919984903e-05, |
| "loss": 0.2265, |
| "step": 31310 |
| }, |
| { |
| "epoch": 17.57, |
| "learning_rate": 2.1420928477071148e-05, |
| "loss": 0.2522, |
| "step": 31320 |
| }, |
| { |
| "epoch": 17.57, |
| "learning_rate": 2.1418097754293264e-05, |
| "loss": 0.2407, |
| "step": 31330 |
| }, |
| { |
| "epoch": 17.58, |
| "learning_rate": 2.141526703151538e-05, |
| "loss": 0.239, |
| "step": 31340 |
| }, |
| { |
| "epoch": 17.58, |
| "learning_rate": 2.14124363087375e-05, |
| "loss": 0.2411, |
| "step": 31350 |
| }, |
| { |
| "epoch": 17.59, |
| "learning_rate": 2.1409605585959615e-05, |
| "loss": 0.2282, |
| "step": 31360 |
| }, |
| { |
| "epoch": 17.59, |
| "learning_rate": 2.1406774863181734e-05, |
| "loss": 0.241, |
| "step": 31370 |
| }, |
| { |
| "epoch": 17.6, |
| "learning_rate": 2.140394414040385e-05, |
| "loss": 0.2518, |
| "step": 31380 |
| }, |
| { |
| "epoch": 17.6, |
| "learning_rate": 2.140111341762597e-05, |
| "loss": 0.2555, |
| "step": 31390 |
| }, |
| { |
| "epoch": 17.61, |
| "learning_rate": 2.1398282694848085e-05, |
| "loss": 0.2454, |
| "step": 31400 |
| }, |
| { |
| "epoch": 17.62, |
| "learning_rate": 2.1395451972070204e-05, |
| "loss": 0.2297, |
| "step": 31410 |
| }, |
| { |
| "epoch": 17.62, |
| "learning_rate": 2.139262124929232e-05, |
| "loss": 0.242, |
| "step": 31420 |
| }, |
| { |
| "epoch": 17.63, |
| "learning_rate": 2.138979052651444e-05, |
| "loss": 0.2491, |
| "step": 31430 |
| }, |
| { |
| "epoch": 17.63, |
| "learning_rate": 2.1386959803736555e-05, |
| "loss": 0.2364, |
| "step": 31440 |
| }, |
| { |
| "epoch": 17.64, |
| "learning_rate": 2.138412908095867e-05, |
| "loss": 0.2417, |
| "step": 31450 |
| }, |
| { |
| "epoch": 17.64, |
| "learning_rate": 2.138129835818079e-05, |
| "loss": 0.2296, |
| "step": 31460 |
| }, |
| { |
| "epoch": 17.65, |
| "learning_rate": 2.1378467635402906e-05, |
| "loss": 0.2574, |
| "step": 31470 |
| }, |
| { |
| "epoch": 17.66, |
| "learning_rate": 2.1375636912625025e-05, |
| "loss": 0.2471, |
| "step": 31480 |
| }, |
| { |
| "epoch": 17.66, |
| "learning_rate": 2.137280618984714e-05, |
| "loss": 0.2364, |
| "step": 31490 |
| }, |
| { |
| "epoch": 17.67, |
| "learning_rate": 2.136997546706926e-05, |
| "loss": 0.244, |
| "step": 31500 |
| }, |
| { |
| "epoch": 17.67, |
| "learning_rate": 2.1367144744291376e-05, |
| "loss": 0.2273, |
| "step": 31510 |
| }, |
| { |
| "epoch": 17.68, |
| "learning_rate": 2.1364314021513495e-05, |
| "loss": 0.2472, |
| "step": 31520 |
| }, |
| { |
| "epoch": 17.68, |
| "learning_rate": 2.136148329873561e-05, |
| "loss": 0.2482, |
| "step": 31530 |
| }, |
| { |
| "epoch": 17.69, |
| "learning_rate": 2.1358652575957726e-05, |
| "loss": 0.2332, |
| "step": 31540 |
| }, |
| { |
| "epoch": 17.69, |
| "learning_rate": 2.1355821853179846e-05, |
| "loss": 0.2396, |
| "step": 31550 |
| }, |
| { |
| "epoch": 17.7, |
| "learning_rate": 2.1352991130401965e-05, |
| "loss": 0.2232, |
| "step": 31560 |
| }, |
| { |
| "epoch": 17.71, |
| "learning_rate": 2.135016040762408e-05, |
| "loss": 0.2422, |
| "step": 31570 |
| }, |
| { |
| "epoch": 17.71, |
| "learning_rate": 2.1347329684846196e-05, |
| "loss": 0.2458, |
| "step": 31580 |
| }, |
| { |
| "epoch": 17.72, |
| "learning_rate": 2.1344498962068316e-05, |
| "loss": 0.2329, |
| "step": 31590 |
| }, |
| { |
| "epoch": 17.72, |
| "learning_rate": 2.1341668239290435e-05, |
| "loss": 0.2401, |
| "step": 31600 |
| }, |
| { |
| "epoch": 17.73, |
| "learning_rate": 2.133883751651255e-05, |
| "loss": 0.2481, |
| "step": 31610 |
| }, |
| { |
| "epoch": 17.73, |
| "learning_rate": 2.1336006793734666e-05, |
| "loss": 0.248, |
| "step": 31620 |
| }, |
| { |
| "epoch": 17.74, |
| "learning_rate": 2.1333176070956782e-05, |
| "loss": 0.2535, |
| "step": 31630 |
| }, |
| { |
| "epoch": 17.75, |
| "learning_rate": 2.1330345348178905e-05, |
| "loss": 0.2461, |
| "step": 31640 |
| }, |
| { |
| "epoch": 17.75, |
| "learning_rate": 2.132751462540102e-05, |
| "loss": 0.2415, |
| "step": 31650 |
| }, |
| { |
| "epoch": 17.76, |
| "learning_rate": 2.1324683902623136e-05, |
| "loss": 0.2443, |
| "step": 31660 |
| }, |
| { |
| "epoch": 17.76, |
| "learning_rate": 2.1321853179845252e-05, |
| "loss": 0.2439, |
| "step": 31670 |
| }, |
| { |
| "epoch": 17.77, |
| "learning_rate": 2.1319022457067375e-05, |
| "loss": 0.2444, |
| "step": 31680 |
| }, |
| { |
| "epoch": 17.77, |
| "learning_rate": 2.131619173428949e-05, |
| "loss": 0.2396, |
| "step": 31690 |
| }, |
| { |
| "epoch": 17.78, |
| "learning_rate": 2.1313361011511606e-05, |
| "loss": 0.2426, |
| "step": 31700 |
| }, |
| { |
| "epoch": 17.78, |
| "learning_rate": 2.1310530288733722e-05, |
| "loss": 0.2355, |
| "step": 31710 |
| }, |
| { |
| "epoch": 17.79, |
| "learning_rate": 2.130769956595584e-05, |
| "loss": 0.2502, |
| "step": 31720 |
| }, |
| { |
| "epoch": 17.8, |
| "learning_rate": 2.130486884317796e-05, |
| "loss": 0.2424, |
| "step": 31730 |
| }, |
| { |
| "epoch": 17.8, |
| "learning_rate": 2.1302038120400076e-05, |
| "loss": 0.2339, |
| "step": 31740 |
| }, |
| { |
| "epoch": 17.81, |
| "learning_rate": 2.1299207397622192e-05, |
| "loss": 0.2322, |
| "step": 31750 |
| }, |
| { |
| "epoch": 17.81, |
| "learning_rate": 2.129637667484431e-05, |
| "loss": 0.247, |
| "step": 31760 |
| }, |
| { |
| "epoch": 17.82, |
| "learning_rate": 2.129354595206643e-05, |
| "loss": 0.2452, |
| "step": 31770 |
| }, |
| { |
| "epoch": 17.82, |
| "learning_rate": 2.1290715229288546e-05, |
| "loss": 0.2391, |
| "step": 31780 |
| }, |
| { |
| "epoch": 17.83, |
| "learning_rate": 2.1287884506510662e-05, |
| "loss": 0.2396, |
| "step": 31790 |
| }, |
| { |
| "epoch": 17.83, |
| "learning_rate": 2.128505378373278e-05, |
| "loss": 0.2362, |
| "step": 31800 |
| }, |
| { |
| "epoch": 17.84, |
| "learning_rate": 2.1282223060954897e-05, |
| "loss": 0.2259, |
| "step": 31810 |
| }, |
| { |
| "epoch": 17.85, |
| "learning_rate": 2.1279392338177016e-05, |
| "loss": 0.234, |
| "step": 31820 |
| }, |
| { |
| "epoch": 17.85, |
| "learning_rate": 2.1276561615399132e-05, |
| "loss": 0.2479, |
| "step": 31830 |
| }, |
| { |
| "epoch": 17.86, |
| "learning_rate": 2.127373089262125e-05, |
| "loss": 0.2437, |
| "step": 31840 |
| }, |
| { |
| "epoch": 17.86, |
| "learning_rate": 2.1270900169843367e-05, |
| "loss": 0.2455, |
| "step": 31850 |
| }, |
| { |
| "epoch": 17.87, |
| "learning_rate": 2.1268069447065486e-05, |
| "loss": 0.2403, |
| "step": 31860 |
| }, |
| { |
| "epoch": 17.87, |
| "learning_rate": 2.1265238724287602e-05, |
| "loss": 0.2321, |
| "step": 31870 |
| }, |
| { |
| "epoch": 17.88, |
| "learning_rate": 2.1262408001509718e-05, |
| "loss": 0.233, |
| "step": 31880 |
| }, |
| { |
| "epoch": 17.89, |
| "learning_rate": 2.1259577278731837e-05, |
| "loss": 0.2411, |
| "step": 31890 |
| }, |
| { |
| "epoch": 17.89, |
| "learning_rate": 2.1256746555953953e-05, |
| "loss": 0.2531, |
| "step": 31900 |
| }, |
| { |
| "epoch": 17.9, |
| "learning_rate": 2.1253915833176072e-05, |
| "loss": 0.2407, |
| "step": 31910 |
| }, |
| { |
| "epoch": 17.9, |
| "learning_rate": 2.1251085110398188e-05, |
| "loss": 0.2415, |
| "step": 31920 |
| }, |
| { |
| "epoch": 17.91, |
| "learning_rate": 2.1248254387620307e-05, |
| "loss": 0.2457, |
| "step": 31930 |
| }, |
| { |
| "epoch": 17.91, |
| "learning_rate": 2.1245423664842423e-05, |
| "loss": 0.243, |
| "step": 31940 |
| }, |
| { |
| "epoch": 17.92, |
| "learning_rate": 2.1242592942064542e-05, |
| "loss": 0.2357, |
| "step": 31950 |
| }, |
| { |
| "epoch": 17.92, |
| "learning_rate": 2.1239762219286658e-05, |
| "loss": 0.2371, |
| "step": 31960 |
| }, |
| { |
| "epoch": 17.93, |
| "learning_rate": 2.1236931496508777e-05, |
| "loss": 0.2419, |
| "step": 31970 |
| }, |
| { |
| "epoch": 17.94, |
| "learning_rate": 2.1234100773730893e-05, |
| "loss": 0.2446, |
| "step": 31980 |
| }, |
| { |
| "epoch": 17.94, |
| "learning_rate": 2.123127005095301e-05, |
| "loss": 0.234, |
| "step": 31990 |
| }, |
| { |
| "epoch": 17.95, |
| "learning_rate": 2.1228439328175128e-05, |
| "loss": 0.2501, |
| "step": 32000 |
| }, |
| { |
| "epoch": 17.95, |
| "learning_rate": 2.1225608605397247e-05, |
| "loss": 0.2416, |
| "step": 32010 |
| }, |
| { |
| "epoch": 17.96, |
| "learning_rate": 2.1222777882619363e-05, |
| "loss": 0.2438, |
| "step": 32020 |
| }, |
| { |
| "epoch": 17.96, |
| "learning_rate": 2.121994715984148e-05, |
| "loss": 0.2334, |
| "step": 32030 |
| }, |
| { |
| "epoch": 17.97, |
| "learning_rate": 2.1217116437063598e-05, |
| "loss": 0.2334, |
| "step": 32040 |
| }, |
| { |
| "epoch": 17.97, |
| "learning_rate": 2.1214285714285717e-05, |
| "loss": 0.2307, |
| "step": 32050 |
| }, |
| { |
| "epoch": 17.98, |
| "learning_rate": 2.1211454991507833e-05, |
| "loss": 0.2381, |
| "step": 32060 |
| }, |
| { |
| "epoch": 17.99, |
| "learning_rate": 2.120862426872995e-05, |
| "loss": 0.2351, |
| "step": 32070 |
| }, |
| { |
| "epoch": 17.99, |
| "learning_rate": 2.1205793545952064e-05, |
| "loss": 0.2491, |
| "step": 32080 |
| }, |
| { |
| "epoch": 18.0, |
| "learning_rate": 2.1202962823174187e-05, |
| "loss": 0.2544, |
| "step": 32090 |
| }, |
| { |
| "epoch": 18.0, |
| "eval_cer": 0.1633351834698041, |
| "eval_loss": 0.4139803946018219, |
| "eval_runtime": 314.6177, |
| "eval_samples_per_second": 16.919, |
| "eval_steps_per_second": 4.231, |
| "eval_wer": 0.22069351730375364, |
| "step": 32094 |
| }, |
| { |
| "epoch": 18.0, |
| "learning_rate": 2.1200132100396303e-05, |
| "loss": 0.2542, |
| "step": 32100 |
| }, |
| { |
| "epoch": 18.01, |
| "learning_rate": 2.119730137761842e-05, |
| "loss": 0.2395, |
| "step": 32110 |
| }, |
| { |
| "epoch": 18.01, |
| "learning_rate": 2.1194470654840534e-05, |
| "loss": 0.2295, |
| "step": 32120 |
| }, |
| { |
| "epoch": 18.02, |
| "learning_rate": 2.1191639932062657e-05, |
| "loss": 0.2198, |
| "step": 32130 |
| }, |
| { |
| "epoch": 18.03, |
| "learning_rate": 2.1188809209284773e-05, |
| "loss": 0.2271, |
| "step": 32140 |
| }, |
| { |
| "epoch": 18.03, |
| "learning_rate": 2.118597848650689e-05, |
| "loss": 0.2177, |
| "step": 32150 |
| }, |
| { |
| "epoch": 18.04, |
| "learning_rate": 2.1183147763729004e-05, |
| "loss": 0.2451, |
| "step": 32160 |
| }, |
| { |
| "epoch": 18.04, |
| "learning_rate": 2.1180317040951123e-05, |
| "loss": 0.2361, |
| "step": 32170 |
| }, |
| { |
| "epoch": 18.05, |
| "learning_rate": 2.1177486318173243e-05, |
| "loss": 0.2374, |
| "step": 32180 |
| }, |
| { |
| "epoch": 18.05, |
| "learning_rate": 2.117465559539536e-05, |
| "loss": 0.2158, |
| "step": 32190 |
| }, |
| { |
| "epoch": 18.06, |
| "learning_rate": 2.1171824872617474e-05, |
| "loss": 0.2222, |
| "step": 32200 |
| }, |
| { |
| "epoch": 18.07, |
| "learning_rate": 2.1168994149839593e-05, |
| "loss": 0.2402, |
| "step": 32210 |
| }, |
| { |
| "epoch": 18.07, |
| "learning_rate": 2.1166163427061713e-05, |
| "loss": 0.2339, |
| "step": 32220 |
| }, |
| { |
| "epoch": 18.08, |
| "learning_rate": 2.116333270428383e-05, |
| "loss": 0.2425, |
| "step": 32230 |
| }, |
| { |
| "epoch": 18.08, |
| "learning_rate": 2.1160501981505944e-05, |
| "loss": 0.2257, |
| "step": 32240 |
| }, |
| { |
| "epoch": 18.09, |
| "learning_rate": 2.1157671258728063e-05, |
| "loss": 0.2233, |
| "step": 32250 |
| }, |
| { |
| "epoch": 18.09, |
| "learning_rate": 2.115484053595018e-05, |
| "loss": 0.2453, |
| "step": 32260 |
| }, |
| { |
| "epoch": 18.1, |
| "learning_rate": 2.11520098131723e-05, |
| "loss": 0.231, |
| "step": 32270 |
| }, |
| { |
| "epoch": 18.1, |
| "learning_rate": 2.1149179090394414e-05, |
| "loss": 0.2335, |
| "step": 32280 |
| }, |
| { |
| "epoch": 18.11, |
| "learning_rate": 2.114634836761653e-05, |
| "loss": 0.2442, |
| "step": 32290 |
| }, |
| { |
| "epoch": 18.12, |
| "learning_rate": 2.114351764483865e-05, |
| "loss": 0.2312, |
| "step": 32300 |
| }, |
| { |
| "epoch": 18.12, |
| "learning_rate": 2.114068692206077e-05, |
| "loss": 0.2319, |
| "step": 32310 |
| }, |
| { |
| "epoch": 18.13, |
| "learning_rate": 2.1137856199282884e-05, |
| "loss": 0.2286, |
| "step": 32320 |
| }, |
| { |
| "epoch": 18.13, |
| "learning_rate": 2.1135025476505e-05, |
| "loss": 0.2374, |
| "step": 32330 |
| }, |
| { |
| "epoch": 18.14, |
| "learning_rate": 2.113219475372712e-05, |
| "loss": 0.2388, |
| "step": 32340 |
| }, |
| { |
| "epoch": 18.14, |
| "learning_rate": 2.1129364030949235e-05, |
| "loss": 0.2209, |
| "step": 32350 |
| }, |
| { |
| "epoch": 18.15, |
| "learning_rate": 2.1126533308171354e-05, |
| "loss": 0.2335, |
| "step": 32360 |
| }, |
| { |
| "epoch": 18.15, |
| "learning_rate": 2.112370258539347e-05, |
| "loss": 0.2257, |
| "step": 32370 |
| }, |
| { |
| "epoch": 18.16, |
| "learning_rate": 2.112087186261559e-05, |
| "loss": 0.2315, |
| "step": 32380 |
| }, |
| { |
| "epoch": 18.17, |
| "learning_rate": 2.1118041139837705e-05, |
| "loss": 0.2261, |
| "step": 32390 |
| }, |
| { |
| "epoch": 18.17, |
| "learning_rate": 2.1115210417059824e-05, |
| "loss": 0.2134, |
| "step": 32400 |
| }, |
| { |
| "epoch": 18.18, |
| "learning_rate": 2.111237969428194e-05, |
| "loss": 0.2409, |
| "step": 32410 |
| }, |
| { |
| "epoch": 18.18, |
| "learning_rate": 2.110954897150406e-05, |
| "loss": 0.2337, |
| "step": 32420 |
| }, |
| { |
| "epoch": 18.19, |
| "learning_rate": 2.1106718248726175e-05, |
| "loss": 0.226, |
| "step": 32430 |
| }, |
| { |
| "epoch": 18.19, |
| "learning_rate": 2.110388752594829e-05, |
| "loss": 0.2335, |
| "step": 32440 |
| }, |
| { |
| "epoch": 18.2, |
| "learning_rate": 2.110105680317041e-05, |
| "loss": 0.2252, |
| "step": 32450 |
| }, |
| { |
| "epoch": 18.21, |
| "learning_rate": 2.109822608039253e-05, |
| "loss": 0.2372, |
| "step": 32460 |
| }, |
| { |
| "epoch": 18.21, |
| "learning_rate": 2.1095395357614645e-05, |
| "loss": 0.245, |
| "step": 32470 |
| }, |
| { |
| "epoch": 18.22, |
| "learning_rate": 2.109256463483676e-05, |
| "loss": 0.2232, |
| "step": 32480 |
| }, |
| { |
| "epoch": 18.22, |
| "learning_rate": 2.108973391205888e-05, |
| "loss": 0.2285, |
| "step": 32490 |
| }, |
| { |
| "epoch": 18.23, |
| "learning_rate": 2.1086903189281e-05, |
| "loss": 0.232, |
| "step": 32500 |
| }, |
| { |
| "epoch": 18.23, |
| "learning_rate": 2.1084072466503115e-05, |
| "loss": 0.2401, |
| "step": 32510 |
| }, |
| { |
| "epoch": 18.24, |
| "learning_rate": 2.108124174372523e-05, |
| "loss": 0.253, |
| "step": 32520 |
| }, |
| { |
| "epoch": 18.24, |
| "learning_rate": 2.1078411020947346e-05, |
| "loss": 0.2245, |
| "step": 32530 |
| }, |
| { |
| "epoch": 18.25, |
| "learning_rate": 2.107558029816947e-05, |
| "loss": 0.2311, |
| "step": 32540 |
| }, |
| { |
| "epoch": 18.26, |
| "learning_rate": 2.1072749575391585e-05, |
| "loss": 0.2104, |
| "step": 32550 |
| }, |
| { |
| "epoch": 18.26, |
| "learning_rate": 2.10699188526137e-05, |
| "loss": 0.2305, |
| "step": 32560 |
| }, |
| { |
| "epoch": 18.27, |
| "learning_rate": 2.1067088129835816e-05, |
| "loss": 0.225, |
| "step": 32570 |
| }, |
| { |
| "epoch": 18.27, |
| "learning_rate": 2.106425740705794e-05, |
| "loss": 0.2307, |
| "step": 32580 |
| }, |
| { |
| "epoch": 18.28, |
| "learning_rate": 2.1061426684280055e-05, |
| "loss": 0.2318, |
| "step": 32590 |
| }, |
| { |
| "epoch": 18.28, |
| "learning_rate": 2.105859596150217e-05, |
| "loss": 0.2219, |
| "step": 32600 |
| }, |
| { |
| "epoch": 18.29, |
| "learning_rate": 2.1055765238724286e-05, |
| "loss": 0.2343, |
| "step": 32610 |
| }, |
| { |
| "epoch": 18.29, |
| "learning_rate": 2.1052934515946406e-05, |
| "loss": 0.2262, |
| "step": 32620 |
| }, |
| { |
| "epoch": 18.3, |
| "learning_rate": 2.1050103793168525e-05, |
| "loss": 0.2334, |
| "step": 32630 |
| }, |
| { |
| "epoch": 18.31, |
| "learning_rate": 2.104727307039064e-05, |
| "loss": 0.2351, |
| "step": 32640 |
| }, |
| { |
| "epoch": 18.31, |
| "learning_rate": 2.1044442347612756e-05, |
| "loss": 0.2247, |
| "step": 32650 |
| }, |
| { |
| "epoch": 18.32, |
| "learning_rate": 2.1041611624834876e-05, |
| "loss": 0.2387, |
| "step": 32660 |
| }, |
| { |
| "epoch": 18.32, |
| "learning_rate": 2.1038780902056995e-05, |
| "loss": 0.2379, |
| "step": 32670 |
| }, |
| { |
| "epoch": 18.33, |
| "learning_rate": 2.103595017927911e-05, |
| "loss": 0.2241, |
| "step": 32680 |
| }, |
| { |
| "epoch": 18.33, |
| "learning_rate": 2.1033119456501226e-05, |
| "loss": 0.2224, |
| "step": 32690 |
| }, |
| { |
| "epoch": 18.34, |
| "learning_rate": 2.1030288733723342e-05, |
| "loss": 0.2232, |
| "step": 32700 |
| }, |
| { |
| "epoch": 18.35, |
| "learning_rate": 2.102745801094546e-05, |
| "loss": 0.2381, |
| "step": 32710 |
| }, |
| { |
| "epoch": 18.35, |
| "learning_rate": 2.102462728816758e-05, |
| "loss": 0.2318, |
| "step": 32720 |
| }, |
| { |
| "epoch": 18.36, |
| "learning_rate": 2.1021796565389696e-05, |
| "loss": 0.2263, |
| "step": 32730 |
| }, |
| { |
| "epoch": 18.36, |
| "learning_rate": 2.1018965842611812e-05, |
| "loss": 0.2368, |
| "step": 32740 |
| }, |
| { |
| "epoch": 18.37, |
| "learning_rate": 2.101613511983393e-05, |
| "loss": 0.2298, |
| "step": 32750 |
| }, |
| { |
| "epoch": 18.37, |
| "learning_rate": 2.1013587469333837e-05, |
| "loss": 0.2332, |
| "step": 32760 |
| }, |
| { |
| "epoch": 18.38, |
| "learning_rate": 2.1010756746555953e-05, |
| "loss": 0.2431, |
| "step": 32770 |
| }, |
| { |
| "epoch": 18.38, |
| "learning_rate": 2.1007926023778072e-05, |
| "loss": 0.2263, |
| "step": 32780 |
| }, |
| { |
| "epoch": 18.39, |
| "learning_rate": 2.100509530100019e-05, |
| "loss": 0.2325, |
| "step": 32790 |
| }, |
| { |
| "epoch": 18.4, |
| "learning_rate": 2.1002264578222307e-05, |
| "loss": 0.2251, |
| "step": 32800 |
| }, |
| { |
| "epoch": 18.4, |
| "learning_rate": 2.0999433855444423e-05, |
| "loss": 0.2303, |
| "step": 32810 |
| }, |
| { |
| "epoch": 18.41, |
| "learning_rate": 2.099660313266654e-05, |
| "loss": 0.24, |
| "step": 32820 |
| }, |
| { |
| "epoch": 18.41, |
| "learning_rate": 2.099377240988866e-05, |
| "loss": 0.2291, |
| "step": 32830 |
| }, |
| { |
| "epoch": 18.42, |
| "learning_rate": 2.0990941687110777e-05, |
| "loss": 0.2313, |
| "step": 32840 |
| }, |
| { |
| "epoch": 18.42, |
| "learning_rate": 2.0988110964332893e-05, |
| "loss": 0.2374, |
| "step": 32850 |
| }, |
| { |
| "epoch": 18.43, |
| "learning_rate": 2.098528024155501e-05, |
| "loss": 0.2379, |
| "step": 32860 |
| }, |
| { |
| "epoch": 18.44, |
| "learning_rate": 2.0982449518777128e-05, |
| "loss": 0.239, |
| "step": 32870 |
| }, |
| { |
| "epoch": 18.44, |
| "learning_rate": 2.0979618795999247e-05, |
| "loss": 0.2286, |
| "step": 32880 |
| }, |
| { |
| "epoch": 18.45, |
| "learning_rate": 2.0976788073221363e-05, |
| "loss": 0.2207, |
| "step": 32890 |
| }, |
| { |
| "epoch": 18.45, |
| "learning_rate": 2.097395735044348e-05, |
| "loss": 0.2414, |
| "step": 32900 |
| }, |
| { |
| "epoch": 18.46, |
| "learning_rate": 2.0971126627665598e-05, |
| "loss": 0.2364, |
| "step": 32910 |
| }, |
| { |
| "epoch": 18.46, |
| "learning_rate": 2.0968295904887717e-05, |
| "loss": 0.2301, |
| "step": 32920 |
| }, |
| { |
| "epoch": 18.47, |
| "learning_rate": 2.0965465182109833e-05, |
| "loss": 0.234, |
| "step": 32930 |
| }, |
| { |
| "epoch": 18.47, |
| "learning_rate": 2.096263445933195e-05, |
| "loss": 0.2314, |
| "step": 32940 |
| }, |
| { |
| "epoch": 18.48, |
| "learning_rate": 2.0959803736554068e-05, |
| "loss": 0.2167, |
| "step": 32950 |
| }, |
| { |
| "epoch": 18.49, |
| "learning_rate": 2.0956973013776183e-05, |
| "loss": 0.2298, |
| "step": 32960 |
| }, |
| { |
| "epoch": 18.49, |
| "learning_rate": 2.0954142290998303e-05, |
| "loss": 0.2481, |
| "step": 32970 |
| }, |
| { |
| "epoch": 18.5, |
| "learning_rate": 2.095131156822042e-05, |
| "loss": 0.2411, |
| "step": 32980 |
| }, |
| { |
| "epoch": 18.5, |
| "learning_rate": 2.0948480845442538e-05, |
| "loss": 0.2209, |
| "step": 32990 |
| }, |
| { |
| "epoch": 18.51, |
| "learning_rate": 2.0945650122664653e-05, |
| "loss": 0.236, |
| "step": 33000 |
| }, |
| { |
| "epoch": 18.51, |
| "learning_rate": 2.0942819399886773e-05, |
| "loss": 0.2499, |
| "step": 33010 |
| }, |
| { |
| "epoch": 18.52, |
| "learning_rate": 2.093998867710889e-05, |
| "loss": 0.2391, |
| "step": 33020 |
| }, |
| { |
| "epoch": 18.52, |
| "learning_rate": 2.0937157954331008e-05, |
| "loss": 0.2412, |
| "step": 33030 |
| }, |
| { |
| "epoch": 18.53, |
| "learning_rate": 2.0934327231553123e-05, |
| "loss": 0.2308, |
| "step": 33040 |
| }, |
| { |
| "epoch": 18.54, |
| "learning_rate": 2.093149650877524e-05, |
| "loss": 0.2272, |
| "step": 33050 |
| }, |
| { |
| "epoch": 18.54, |
| "learning_rate": 2.092866578599736e-05, |
| "loss": 0.2387, |
| "step": 33060 |
| }, |
| { |
| "epoch": 18.55, |
| "learning_rate": 2.0925835063219478e-05, |
| "loss": 0.233, |
| "step": 33070 |
| }, |
| { |
| "epoch": 18.55, |
| "learning_rate": 2.0923004340441593e-05, |
| "loss": 0.2343, |
| "step": 33080 |
| }, |
| { |
| "epoch": 18.56, |
| "learning_rate": 2.092017361766371e-05, |
| "loss": 0.2381, |
| "step": 33090 |
| }, |
| { |
| "epoch": 18.56, |
| "learning_rate": 2.091734289488583e-05, |
| "loss": 0.2337, |
| "step": 33100 |
| }, |
| { |
| "epoch": 18.57, |
| "learning_rate": 2.0914512172107948e-05, |
| "loss": 0.2395, |
| "step": 33110 |
| }, |
| { |
| "epoch": 18.58, |
| "learning_rate": 2.0911681449330063e-05, |
| "loss": 0.2295, |
| "step": 33120 |
| }, |
| { |
| "epoch": 18.58, |
| "learning_rate": 2.090885072655218e-05, |
| "loss": 0.2338, |
| "step": 33130 |
| }, |
| { |
| "epoch": 18.59, |
| "learning_rate": 2.0906020003774295e-05, |
| "loss": 0.2369, |
| "step": 33140 |
| }, |
| { |
| "epoch": 18.59, |
| "learning_rate": 2.0903189280996417e-05, |
| "loss": 0.2264, |
| "step": 33150 |
| }, |
| { |
| "epoch": 18.6, |
| "learning_rate": 2.0900358558218533e-05, |
| "loss": 0.2365, |
| "step": 33160 |
| }, |
| { |
| "epoch": 18.6, |
| "learning_rate": 2.089752783544065e-05, |
| "loss": 0.2357, |
| "step": 33170 |
| }, |
| { |
| "epoch": 18.61, |
| "learning_rate": 2.0894697112662765e-05, |
| "loss": 0.2449, |
| "step": 33180 |
| }, |
| { |
| "epoch": 18.61, |
| "learning_rate": 2.0891866389884887e-05, |
| "loss": 0.2354, |
| "step": 33190 |
| }, |
| { |
| "epoch": 18.62, |
| "learning_rate": 2.0889035667107003e-05, |
| "loss": 0.236, |
| "step": 33200 |
| }, |
| { |
| "epoch": 18.63, |
| "learning_rate": 2.088620494432912e-05, |
| "loss": 0.2318, |
| "step": 33210 |
| }, |
| { |
| "epoch": 18.63, |
| "learning_rate": 2.0883374221551235e-05, |
| "loss": 0.2403, |
| "step": 33220 |
| }, |
| { |
| "epoch": 18.64, |
| "learning_rate": 2.088054349877335e-05, |
| "loss": 0.235, |
| "step": 33230 |
| }, |
| { |
| "epoch": 18.64, |
| "learning_rate": 2.0877712775995473e-05, |
| "loss": 0.2313, |
| "step": 33240 |
| }, |
| { |
| "epoch": 18.65, |
| "learning_rate": 2.087488205321759e-05, |
| "loss": 0.2228, |
| "step": 33250 |
| }, |
| { |
| "epoch": 18.65, |
| "learning_rate": 2.0872051330439705e-05, |
| "loss": 0.2321, |
| "step": 33260 |
| }, |
| { |
| "epoch": 18.66, |
| "learning_rate": 2.086922060766182e-05, |
| "loss": 0.2362, |
| "step": 33270 |
| }, |
| { |
| "epoch": 18.66, |
| "learning_rate": 2.0866389884883943e-05, |
| "loss": 0.2316, |
| "step": 33280 |
| }, |
| { |
| "epoch": 18.67, |
| "learning_rate": 2.086355916210606e-05, |
| "loss": 0.2357, |
| "step": 33290 |
| }, |
| { |
| "epoch": 18.68, |
| "learning_rate": 2.0860728439328175e-05, |
| "loss": 0.2264, |
| "step": 33300 |
| }, |
| { |
| "epoch": 18.68, |
| "learning_rate": 2.085789771655029e-05, |
| "loss": 0.2521, |
| "step": 33310 |
| }, |
| { |
| "epoch": 18.69, |
| "learning_rate": 2.085506699377241e-05, |
| "loss": 0.2323, |
| "step": 33320 |
| }, |
| { |
| "epoch": 18.69, |
| "learning_rate": 2.085223627099453e-05, |
| "loss": 0.2343, |
| "step": 33330 |
| }, |
| { |
| "epoch": 18.7, |
| "learning_rate": 2.0849405548216645e-05, |
| "loss": 0.2458, |
| "step": 33340 |
| }, |
| { |
| "epoch": 18.7, |
| "learning_rate": 2.084657482543876e-05, |
| "loss": 0.2294, |
| "step": 33350 |
| }, |
| { |
| "epoch": 18.71, |
| "learning_rate": 2.084374410266088e-05, |
| "loss": 0.2415, |
| "step": 33360 |
| }, |
| { |
| "epoch": 18.72, |
| "learning_rate": 2.0840913379883e-05, |
| "loss": 0.2443, |
| "step": 33370 |
| }, |
| { |
| "epoch": 18.72, |
| "learning_rate": 2.0838082657105115e-05, |
| "loss": 0.2365, |
| "step": 33380 |
| }, |
| { |
| "epoch": 18.73, |
| "learning_rate": 2.083525193432723e-05, |
| "loss": 0.2333, |
| "step": 33390 |
| }, |
| { |
| "epoch": 18.73, |
| "learning_rate": 2.083242121154935e-05, |
| "loss": 0.2243, |
| "step": 33400 |
| }, |
| { |
| "epoch": 18.74, |
| "learning_rate": 2.0829590488771466e-05, |
| "loss": 0.2384, |
| "step": 33410 |
| }, |
| { |
| "epoch": 18.74, |
| "learning_rate": 2.0826759765993585e-05, |
| "loss": 0.2428, |
| "step": 33420 |
| }, |
| { |
| "epoch": 18.75, |
| "learning_rate": 2.08239290432157e-05, |
| "loss": 0.2185, |
| "step": 33430 |
| }, |
| { |
| "epoch": 18.75, |
| "learning_rate": 2.082109832043782e-05, |
| "loss": 0.2413, |
| "step": 33440 |
| }, |
| { |
| "epoch": 18.76, |
| "learning_rate": 2.0818267597659936e-05, |
| "loss": 0.2233, |
| "step": 33450 |
| }, |
| { |
| "epoch": 18.77, |
| "learning_rate": 2.0815436874882055e-05, |
| "loss": 0.2358, |
| "step": 33460 |
| }, |
| { |
| "epoch": 18.77, |
| "learning_rate": 2.081260615210417e-05, |
| "loss": 0.2322, |
| "step": 33470 |
| }, |
| { |
| "epoch": 18.78, |
| "learning_rate": 2.080977542932629e-05, |
| "loss": 0.2264, |
| "step": 33480 |
| }, |
| { |
| "epoch": 18.78, |
| "learning_rate": 2.0806944706548406e-05, |
| "loss": 0.2312, |
| "step": 33490 |
| }, |
| { |
| "epoch": 18.79, |
| "learning_rate": 2.080411398377052e-05, |
| "loss": 0.2245, |
| "step": 33500 |
| }, |
| { |
| "epoch": 18.79, |
| "learning_rate": 2.080128326099264e-05, |
| "loss": 0.2546, |
| "step": 33510 |
| }, |
| { |
| "epoch": 18.8, |
| "learning_rate": 2.079845253821476e-05, |
| "loss": 0.2449, |
| "step": 33520 |
| }, |
| { |
| "epoch": 18.81, |
| "learning_rate": 2.0795621815436875e-05, |
| "loss": 0.2401, |
| "step": 33530 |
| }, |
| { |
| "epoch": 18.81, |
| "learning_rate": 2.079279109265899e-05, |
| "loss": 0.23, |
| "step": 33540 |
| }, |
| { |
| "epoch": 18.82, |
| "learning_rate": 2.078996036988111e-05, |
| "loss": 0.2356, |
| "step": 33550 |
| }, |
| { |
| "epoch": 18.82, |
| "learning_rate": 2.078712964710323e-05, |
| "loss": 0.2266, |
| "step": 33560 |
| }, |
| { |
| "epoch": 18.83, |
| "learning_rate": 2.0784298924325345e-05, |
| "loss": 0.2306, |
| "step": 33570 |
| }, |
| { |
| "epoch": 18.83, |
| "learning_rate": 2.078146820154746e-05, |
| "loss": 0.2317, |
| "step": 33580 |
| }, |
| { |
| "epoch": 18.84, |
| "learning_rate": 2.0778637478769577e-05, |
| "loss": 0.2269, |
| "step": 33590 |
| }, |
| { |
| "epoch": 18.84, |
| "learning_rate": 2.07758067559917e-05, |
| "loss": 0.2291, |
| "step": 33600 |
| }, |
| { |
| "epoch": 18.85, |
| "learning_rate": 2.0772976033213815e-05, |
| "loss": 0.2412, |
| "step": 33610 |
| }, |
| { |
| "epoch": 18.86, |
| "learning_rate": 2.077014531043593e-05, |
| "loss": 0.229, |
| "step": 33620 |
| }, |
| { |
| "epoch": 18.86, |
| "learning_rate": 2.0767314587658047e-05, |
| "loss": 0.2297, |
| "step": 33630 |
| }, |
| { |
| "epoch": 18.87, |
| "learning_rate": 2.076448386488017e-05, |
| "loss": 0.2359, |
| "step": 33640 |
| }, |
| { |
| "epoch": 18.87, |
| "learning_rate": 2.0761653142102285e-05, |
| "loss": 0.2187, |
| "step": 33650 |
| }, |
| { |
| "epoch": 18.88, |
| "learning_rate": 2.07588224193244e-05, |
| "loss": 0.243, |
| "step": 33660 |
| }, |
| { |
| "epoch": 18.88, |
| "learning_rate": 2.0755991696546517e-05, |
| "loss": 0.2166, |
| "step": 33670 |
| }, |
| { |
| "epoch": 18.89, |
| "learning_rate": 2.0753160973768636e-05, |
| "loss": 0.2272, |
| "step": 33680 |
| }, |
| { |
| "epoch": 18.89, |
| "learning_rate": 2.0750330250990755e-05, |
| "loss": 0.2461, |
| "step": 33690 |
| }, |
| { |
| "epoch": 18.9, |
| "learning_rate": 2.074749952821287e-05, |
| "loss": 0.2341, |
| "step": 33700 |
| }, |
| { |
| "epoch": 18.91, |
| "learning_rate": 2.0744668805434987e-05, |
| "loss": 0.2391, |
| "step": 33710 |
| }, |
| { |
| "epoch": 18.91, |
| "learning_rate": 2.0741838082657103e-05, |
| "loss": 0.2348, |
| "step": 33720 |
| }, |
| { |
| "epoch": 18.92, |
| "learning_rate": 2.0739007359879225e-05, |
| "loss": 0.2483, |
| "step": 33730 |
| }, |
| { |
| "epoch": 18.92, |
| "learning_rate": 2.073617663710134e-05, |
| "loss": 0.2289, |
| "step": 33740 |
| }, |
| { |
| "epoch": 18.93, |
| "learning_rate": 2.0733345914323457e-05, |
| "loss": 0.222, |
| "step": 33750 |
| }, |
| { |
| "epoch": 18.93, |
| "learning_rate": 2.0730515191545573e-05, |
| "loss": 0.245, |
| "step": 33760 |
| }, |
| { |
| "epoch": 18.94, |
| "learning_rate": 2.0727684468767692e-05, |
| "loss": 0.2256, |
| "step": 33770 |
| }, |
| { |
| "epoch": 18.95, |
| "learning_rate": 2.072485374598981e-05, |
| "loss": 0.2296, |
| "step": 33780 |
| }, |
| { |
| "epoch": 18.95, |
| "learning_rate": 2.0722023023211927e-05, |
| "loss": 0.227, |
| "step": 33790 |
| }, |
| { |
| "epoch": 18.96, |
| "learning_rate": 2.0719192300434043e-05, |
| "loss": 0.244, |
| "step": 33800 |
| }, |
| { |
| "epoch": 18.96, |
| "learning_rate": 2.0716361577656162e-05, |
| "loss": 0.2409, |
| "step": 33810 |
| }, |
| { |
| "epoch": 18.97, |
| "learning_rate": 2.071353085487828e-05, |
| "loss": 0.2499, |
| "step": 33820 |
| }, |
| { |
| "epoch": 18.97, |
| "learning_rate": 2.0710700132100397e-05, |
| "loss": 0.2265, |
| "step": 33830 |
| }, |
| { |
| "epoch": 18.98, |
| "learning_rate": 2.0707869409322513e-05, |
| "loss": 0.2257, |
| "step": 33840 |
| }, |
| { |
| "epoch": 18.98, |
| "learning_rate": 2.0705038686544632e-05, |
| "loss": 0.2189, |
| "step": 33850 |
| }, |
| { |
| "epoch": 18.99, |
| "learning_rate": 2.0702207963766748e-05, |
| "loss": 0.2456, |
| "step": 33860 |
| }, |
| { |
| "epoch": 19.0, |
| "learning_rate": 2.0699377240988867e-05, |
| "loss": 0.2294, |
| "step": 33870 |
| }, |
| { |
| "epoch": 19.0, |
| "eval_cer": 0.16376956333685416, |
| "eval_loss": 0.4191686809062958, |
| "eval_runtime": 317.0929, |
| "eval_samples_per_second": 16.787, |
| "eval_steps_per_second": 4.198, |
| "eval_wer": 0.22066722579932746, |
| "step": 33877 |
| }, |
| { |
| "epoch": 19.0, |
| "learning_rate": 2.0696546518210983e-05, |
| "loss": 0.2428, |
| "step": 33880 |
| }, |
| { |
| "epoch": 19.01, |
| "learning_rate": 2.0693715795433102e-05, |
| "loss": 0.2244, |
| "step": 33890 |
| }, |
| { |
| "epoch": 19.01, |
| "learning_rate": 2.0690885072655218e-05, |
| "loss": 0.224, |
| "step": 33900 |
| }, |
| { |
| "epoch": 19.02, |
| "learning_rate": 2.0688054349877337e-05, |
| "loss": 0.2276, |
| "step": 33910 |
| }, |
| { |
| "epoch": 19.02, |
| "learning_rate": 2.0685223627099453e-05, |
| "loss": 0.2273, |
| "step": 33920 |
| }, |
| { |
| "epoch": 19.03, |
| "learning_rate": 2.0682392904321572e-05, |
| "loss": 0.2192, |
| "step": 33930 |
| }, |
| { |
| "epoch": 19.04, |
| "learning_rate": 2.0679562181543688e-05, |
| "loss": 0.2295, |
| "step": 33940 |
| }, |
| { |
| "epoch": 19.04, |
| "learning_rate": 2.0676731458765803e-05, |
| "loss": 0.2369, |
| "step": 33950 |
| }, |
| { |
| "epoch": 19.05, |
| "learning_rate": 2.0673900735987923e-05, |
| "loss": 0.2135, |
| "step": 33960 |
| }, |
| { |
| "epoch": 19.05, |
| "learning_rate": 2.0671070013210042e-05, |
| "loss": 0.2259, |
| "step": 33970 |
| }, |
| { |
| "epoch": 19.06, |
| "learning_rate": 2.0668239290432158e-05, |
| "loss": 0.2305, |
| "step": 33980 |
| }, |
| { |
| "epoch": 19.06, |
| "learning_rate": 2.0665408567654273e-05, |
| "loss": 0.2406, |
| "step": 33990 |
| }, |
| { |
| "epoch": 19.07, |
| "learning_rate": 2.0662577844876393e-05, |
| "loss": 0.2282, |
| "step": 34000 |
| }, |
| { |
| "epoch": 19.07, |
| "learning_rate": 2.0659747122098512e-05, |
| "loss": 0.2311, |
| "step": 34010 |
| }, |
| { |
| "epoch": 19.08, |
| "learning_rate": 2.0656916399320628e-05, |
| "loss": 0.2199, |
| "step": 34020 |
| }, |
| { |
| "epoch": 19.09, |
| "learning_rate": 2.0654085676542743e-05, |
| "loss": 0.2089, |
| "step": 34030 |
| }, |
| { |
| "epoch": 19.09, |
| "learning_rate": 2.0651254953764863e-05, |
| "loss": 0.2378, |
| "step": 34040 |
| }, |
| { |
| "epoch": 19.1, |
| "learning_rate": 2.0648424230986982e-05, |
| "loss": 0.221, |
| "step": 34050 |
| }, |
| { |
| "epoch": 19.1, |
| "learning_rate": 2.0645593508209098e-05, |
| "loss": 0.2261, |
| "step": 34060 |
| }, |
| { |
| "epoch": 19.11, |
| "learning_rate": 2.0642762785431213e-05, |
| "loss": 0.2208, |
| "step": 34070 |
| }, |
| { |
| "epoch": 19.11, |
| "learning_rate": 2.063993206265333e-05, |
| "loss": 0.2094, |
| "step": 34080 |
| }, |
| { |
| "epoch": 19.12, |
| "learning_rate": 2.063710133987545e-05, |
| "loss": 0.2173, |
| "step": 34090 |
| }, |
| { |
| "epoch": 19.13, |
| "learning_rate": 2.0634270617097568e-05, |
| "loss": 0.221, |
| "step": 34100 |
| }, |
| { |
| "epoch": 19.13, |
| "learning_rate": 2.0631439894319683e-05, |
| "loss": 0.2346, |
| "step": 34110 |
| }, |
| { |
| "epoch": 19.14, |
| "learning_rate": 2.06286091715418e-05, |
| "loss": 0.2245, |
| "step": 34120 |
| }, |
| { |
| "epoch": 19.14, |
| "learning_rate": 2.062577844876392e-05, |
| "loss": 0.2011, |
| "step": 34130 |
| }, |
| { |
| "epoch": 19.15, |
| "learning_rate": 2.0622947725986038e-05, |
| "loss": 0.225, |
| "step": 34140 |
| }, |
| { |
| "epoch": 19.15, |
| "learning_rate": 2.0620117003208153e-05, |
| "loss": 0.2289, |
| "step": 34150 |
| }, |
| { |
| "epoch": 19.16, |
| "learning_rate": 2.061728628043027e-05, |
| "loss": 0.2289, |
| "step": 34160 |
| }, |
| { |
| "epoch": 19.16, |
| "learning_rate": 2.0614455557652385e-05, |
| "loss": 0.2251, |
| "step": 34170 |
| }, |
| { |
| "epoch": 19.17, |
| "learning_rate": 2.0611624834874508e-05, |
| "loss": 0.2036, |
| "step": 34180 |
| }, |
| { |
| "epoch": 19.18, |
| "learning_rate": 2.0608794112096623e-05, |
| "loss": 0.2339, |
| "step": 34190 |
| }, |
| { |
| "epoch": 19.18, |
| "learning_rate": 2.060596338931874e-05, |
| "loss": 0.2314, |
| "step": 34200 |
| }, |
| { |
| "epoch": 19.19, |
| "learning_rate": 2.0603132666540855e-05, |
| "loss": 0.2303, |
| "step": 34210 |
| }, |
| { |
| "epoch": 19.19, |
| "learning_rate": 2.0600301943762974e-05, |
| "loss": 0.2348, |
| "step": 34220 |
| }, |
| { |
| "epoch": 19.2, |
| "learning_rate": 2.0597471220985093e-05, |
| "loss": 0.2193, |
| "step": 34230 |
| }, |
| { |
| "epoch": 19.2, |
| "learning_rate": 2.059464049820721e-05, |
| "loss": 0.23, |
| "step": 34240 |
| }, |
| { |
| "epoch": 19.21, |
| "learning_rate": 2.0591809775429325e-05, |
| "loss": 0.2313, |
| "step": 34250 |
| }, |
| { |
| "epoch": 19.21, |
| "learning_rate": 2.0588979052651444e-05, |
| "loss": 0.2219, |
| "step": 34260 |
| }, |
| { |
| "epoch": 19.22, |
| "learning_rate": 2.0586148329873563e-05, |
| "loss": 0.2291, |
| "step": 34270 |
| }, |
| { |
| "epoch": 19.23, |
| "learning_rate": 2.058331760709568e-05, |
| "loss": 0.2121, |
| "step": 34280 |
| }, |
| { |
| "epoch": 19.23, |
| "learning_rate": 2.0580486884317795e-05, |
| "loss": 0.2366, |
| "step": 34290 |
| }, |
| { |
| "epoch": 19.24, |
| "learning_rate": 2.0577656161539914e-05, |
| "loss": 0.2199, |
| "step": 34300 |
| }, |
| { |
| "epoch": 19.24, |
| "learning_rate": 2.0574825438762033e-05, |
| "loss": 0.2239, |
| "step": 34310 |
| }, |
| { |
| "epoch": 19.25, |
| "learning_rate": 2.057199471598415e-05, |
| "loss": 0.2401, |
| "step": 34320 |
| }, |
| { |
| "epoch": 19.25, |
| "learning_rate": 2.0569163993206265e-05, |
| "loss": 0.207, |
| "step": 34330 |
| }, |
| { |
| "epoch": 19.26, |
| "learning_rate": 2.0566333270428384e-05, |
| "loss": 0.2269, |
| "step": 34340 |
| }, |
| { |
| "epoch": 19.27, |
| "learning_rate": 2.05635025476505e-05, |
| "loss": 0.2214, |
| "step": 34350 |
| }, |
| { |
| "epoch": 19.27, |
| "learning_rate": 2.056067182487262e-05, |
| "loss": 0.2203, |
| "step": 34360 |
| }, |
| { |
| "epoch": 19.28, |
| "learning_rate": 2.0557841102094735e-05, |
| "loss": 0.2285, |
| "step": 34370 |
| }, |
| { |
| "epoch": 19.28, |
| "learning_rate": 2.0555010379316854e-05, |
| "loss": 0.2102, |
| "step": 34380 |
| }, |
| { |
| "epoch": 19.29, |
| "learning_rate": 2.055217965653897e-05, |
| "loss": 0.2254, |
| "step": 34390 |
| }, |
| { |
| "epoch": 19.29, |
| "learning_rate": 2.054934893376109e-05, |
| "loss": 0.2224, |
| "step": 34400 |
| }, |
| { |
| "epoch": 19.3, |
| "learning_rate": 2.0546518210983205e-05, |
| "loss": 0.2298, |
| "step": 34410 |
| }, |
| { |
| "epoch": 19.3, |
| "learning_rate": 2.0543687488205324e-05, |
| "loss": 0.2169, |
| "step": 34420 |
| }, |
| { |
| "epoch": 19.31, |
| "learning_rate": 2.054085676542744e-05, |
| "loss": 0.2139, |
| "step": 34430 |
| }, |
| { |
| "epoch": 19.32, |
| "learning_rate": 2.0538026042649556e-05, |
| "loss": 0.2416, |
| "step": 34440 |
| }, |
| { |
| "epoch": 19.32, |
| "learning_rate": 2.0535195319871675e-05, |
| "loss": 0.2325, |
| "step": 34450 |
| }, |
| { |
| "epoch": 19.33, |
| "learning_rate": 2.0532364597093794e-05, |
| "loss": 0.2256, |
| "step": 34460 |
| }, |
| { |
| "epoch": 19.33, |
| "learning_rate": 2.052953387431591e-05, |
| "loss": 0.2353, |
| "step": 34470 |
| }, |
| { |
| "epoch": 19.34, |
| "learning_rate": 2.0526703151538026e-05, |
| "loss": 0.2164, |
| "step": 34480 |
| }, |
| { |
| "epoch": 19.34, |
| "learning_rate": 2.0523872428760145e-05, |
| "loss": 0.2314, |
| "step": 34490 |
| }, |
| { |
| "epoch": 19.35, |
| "learning_rate": 2.052104170598226e-05, |
| "loss": 0.2278, |
| "step": 34500 |
| }, |
| { |
| "epoch": 19.35, |
| "learning_rate": 2.051821098320438e-05, |
| "loss": 0.221, |
| "step": 34510 |
| }, |
| { |
| "epoch": 19.36, |
| "learning_rate": 2.0515380260426496e-05, |
| "loss": 0.2313, |
| "step": 34520 |
| }, |
| { |
| "epoch": 19.37, |
| "learning_rate": 2.051254953764861e-05, |
| "loss": 0.2121, |
| "step": 34530 |
| }, |
| { |
| "epoch": 19.37, |
| "learning_rate": 2.050971881487073e-05, |
| "loss": 0.2455, |
| "step": 34540 |
| }, |
| { |
| "epoch": 19.38, |
| "learning_rate": 2.050688809209285e-05, |
| "loss": 0.2267, |
| "step": 34550 |
| }, |
| { |
| "epoch": 19.38, |
| "learning_rate": 2.0504057369314966e-05, |
| "loss": 0.227, |
| "step": 34560 |
| }, |
| { |
| "epoch": 19.39, |
| "learning_rate": 2.050122664653708e-05, |
| "loss": 0.2218, |
| "step": 34570 |
| }, |
| { |
| "epoch": 19.39, |
| "learning_rate": 2.04983959237592e-05, |
| "loss": 0.199, |
| "step": 34580 |
| }, |
| { |
| "epoch": 19.4, |
| "learning_rate": 2.049556520098132e-05, |
| "loss": 0.2332, |
| "step": 34590 |
| }, |
| { |
| "epoch": 19.41, |
| "learning_rate": 2.0492734478203436e-05, |
| "loss": 0.2364, |
| "step": 34600 |
| }, |
| { |
| "epoch": 19.41, |
| "learning_rate": 2.048990375542555e-05, |
| "loss": 0.2132, |
| "step": 34610 |
| }, |
| { |
| "epoch": 19.42, |
| "learning_rate": 2.0487073032647667e-05, |
| "loss": 0.2288, |
| "step": 34620 |
| }, |
| { |
| "epoch": 19.42, |
| "learning_rate": 2.048424230986979e-05, |
| "loss": 0.2048, |
| "step": 34630 |
| }, |
| { |
| "epoch": 19.43, |
| "learning_rate": 2.0481411587091906e-05, |
| "loss": 0.2403, |
| "step": 34640 |
| }, |
| { |
| "epoch": 19.43, |
| "learning_rate": 2.047858086431402e-05, |
| "loss": 0.2217, |
| "step": 34650 |
| }, |
| { |
| "epoch": 19.44, |
| "learning_rate": 2.0475750141536137e-05, |
| "loss": 0.2209, |
| "step": 34660 |
| }, |
| { |
| "epoch": 19.44, |
| "learning_rate": 2.047291941875826e-05, |
| "loss": 0.2248, |
| "step": 34670 |
| }, |
| { |
| "epoch": 19.45, |
| "learning_rate": 2.0470088695980375e-05, |
| "loss": 0.2097, |
| "step": 34680 |
| }, |
| { |
| "epoch": 19.46, |
| "learning_rate": 2.046725797320249e-05, |
| "loss": 0.2292, |
| "step": 34690 |
| }, |
| { |
| "epoch": 19.46, |
| "learning_rate": 2.0464427250424607e-05, |
| "loss": 0.2319, |
| "step": 34700 |
| }, |
| { |
| "epoch": 19.47, |
| "learning_rate": 2.0461596527646726e-05, |
| "loss": 0.2285, |
| "step": 34710 |
| }, |
| { |
| "epoch": 19.47, |
| "learning_rate": 2.0458765804868845e-05, |
| "loss": 0.2191, |
| "step": 34720 |
| }, |
| { |
| "epoch": 19.48, |
| "learning_rate": 2.0456218154368748e-05, |
| "loss": 0.219, |
| "step": 34730 |
| }, |
| { |
| "epoch": 19.48, |
| "learning_rate": 2.0453387431590867e-05, |
| "loss": 0.2356, |
| "step": 34740 |
| }, |
| { |
| "epoch": 19.49, |
| "learning_rate": 2.0450556708812986e-05, |
| "loss": 0.2298, |
| "step": 34750 |
| }, |
| { |
| "epoch": 19.5, |
| "learning_rate": 2.0447725986035102e-05, |
| "loss": 0.2341, |
| "step": 34760 |
| }, |
| { |
| "epoch": 19.5, |
| "learning_rate": 2.0444895263257218e-05, |
| "loss": 0.2297, |
| "step": 34770 |
| }, |
| { |
| "epoch": 19.51, |
| "learning_rate": 2.0442064540479333e-05, |
| "loss": 0.2077, |
| "step": 34780 |
| }, |
| { |
| "epoch": 19.51, |
| "learning_rate": 2.0439233817701456e-05, |
| "loss": 0.2325, |
| "step": 34790 |
| }, |
| { |
| "epoch": 19.52, |
| "learning_rate": 2.0436403094923572e-05, |
| "loss": 0.234, |
| "step": 34800 |
| }, |
| { |
| "epoch": 19.52, |
| "learning_rate": 2.0433572372145688e-05, |
| "loss": 0.2158, |
| "step": 34810 |
| }, |
| { |
| "epoch": 19.53, |
| "learning_rate": 2.0430741649367803e-05, |
| "loss": 0.2265, |
| "step": 34820 |
| }, |
| { |
| "epoch": 19.53, |
| "learning_rate": 2.0427910926589926e-05, |
| "loss": 0.2305, |
| "step": 34830 |
| }, |
| { |
| "epoch": 19.54, |
| "learning_rate": 2.0425080203812042e-05, |
| "loss": 0.239, |
| "step": 34840 |
| }, |
| { |
| "epoch": 19.55, |
| "learning_rate": 2.0422249481034158e-05, |
| "loss": 0.2208, |
| "step": 34850 |
| }, |
| { |
| "epoch": 19.55, |
| "learning_rate": 2.0419418758256273e-05, |
| "loss": 0.2318, |
| "step": 34860 |
| }, |
| { |
| "epoch": 19.56, |
| "learning_rate": 2.0416588035478393e-05, |
| "loss": 0.2334, |
| "step": 34870 |
| }, |
| { |
| "epoch": 19.56, |
| "learning_rate": 2.0413757312700512e-05, |
| "loss": 0.2093, |
| "step": 34880 |
| }, |
| { |
| "epoch": 19.57, |
| "learning_rate": 2.0410926589922628e-05, |
| "loss": 0.2393, |
| "step": 34890 |
| }, |
| { |
| "epoch": 19.57, |
| "learning_rate": 2.0408095867144743e-05, |
| "loss": 0.2336, |
| "step": 34900 |
| }, |
| { |
| "epoch": 19.58, |
| "learning_rate": 2.0405265144366863e-05, |
| "loss": 0.2315, |
| "step": 34910 |
| }, |
| { |
| "epoch": 19.58, |
| "learning_rate": 2.0402434421588982e-05, |
| "loss": 0.2197, |
| "step": 34920 |
| }, |
| { |
| "epoch": 19.59, |
| "learning_rate": 2.0399603698811098e-05, |
| "loss": 0.2031, |
| "step": 34930 |
| }, |
| { |
| "epoch": 19.6, |
| "learning_rate": 2.0396772976033213e-05, |
| "loss": 0.2376, |
| "step": 34940 |
| }, |
| { |
| "epoch": 19.6, |
| "learning_rate": 2.0393942253255333e-05, |
| "loss": 0.2237, |
| "step": 34950 |
| }, |
| { |
| "epoch": 19.61, |
| "learning_rate": 2.039111153047745e-05, |
| "loss": 0.2282, |
| "step": 34960 |
| }, |
| { |
| "epoch": 19.61, |
| "learning_rate": 2.0388280807699568e-05, |
| "loss": 0.2284, |
| "step": 34970 |
| }, |
| { |
| "epoch": 19.62, |
| "learning_rate": 2.0385450084921683e-05, |
| "loss": 0.2098, |
| "step": 34980 |
| }, |
| { |
| "epoch": 19.62, |
| "learning_rate": 2.0382619362143803e-05, |
| "loss": 0.2214, |
| "step": 34990 |
| }, |
| { |
| "epoch": 19.63, |
| "learning_rate": 2.037978863936592e-05, |
| "loss": 0.2352, |
| "step": 35000 |
| }, |
| { |
| "epoch": 19.64, |
| "learning_rate": 2.0376957916588037e-05, |
| "loss": 0.2355, |
| "step": 35010 |
| }, |
| { |
| "epoch": 19.64, |
| "learning_rate": 2.0374127193810153e-05, |
| "loss": 0.225, |
| "step": 35020 |
| }, |
| { |
| "epoch": 19.65, |
| "learning_rate": 2.037129647103227e-05, |
| "loss": 0.2059, |
| "step": 35030 |
| }, |
| { |
| "epoch": 19.65, |
| "learning_rate": 2.0368465748254388e-05, |
| "loss": 0.2382, |
| "step": 35040 |
| }, |
| { |
| "epoch": 19.66, |
| "learning_rate": 2.0365635025476504e-05, |
| "loss": 0.2388, |
| "step": 35050 |
| }, |
| { |
| "epoch": 19.66, |
| "learning_rate": 2.0362804302698623e-05, |
| "loss": 0.2443, |
| "step": 35060 |
| }, |
| { |
| "epoch": 19.67, |
| "learning_rate": 2.035997357992074e-05, |
| "loss": 0.2262, |
| "step": 35070 |
| }, |
| { |
| "epoch": 19.67, |
| "learning_rate": 2.0357142857142858e-05, |
| "loss": 0.2122, |
| "step": 35080 |
| }, |
| { |
| "epoch": 19.68, |
| "learning_rate": 2.0354312134364974e-05, |
| "loss": 0.2405, |
| "step": 35090 |
| }, |
| { |
| "epoch": 19.69, |
| "learning_rate": 2.0351481411587093e-05, |
| "loss": 0.2285, |
| "step": 35100 |
| }, |
| { |
| "epoch": 19.69, |
| "learning_rate": 2.034865068880921e-05, |
| "loss": 0.22, |
| "step": 35110 |
| }, |
| { |
| "epoch": 19.7, |
| "learning_rate": 2.0345819966031328e-05, |
| "loss": 0.2166, |
| "step": 35120 |
| }, |
| { |
| "epoch": 19.7, |
| "learning_rate": 2.0342989243253444e-05, |
| "loss": 0.2107, |
| "step": 35130 |
| }, |
| { |
| "epoch": 19.71, |
| "learning_rate": 2.034015852047556e-05, |
| "loss": 0.236, |
| "step": 35140 |
| }, |
| { |
| "epoch": 19.71, |
| "learning_rate": 2.033732779769768e-05, |
| "loss": 0.2293, |
| "step": 35150 |
| }, |
| { |
| "epoch": 19.72, |
| "learning_rate": 2.0334497074919798e-05, |
| "loss": 0.228, |
| "step": 35160 |
| }, |
| { |
| "epoch": 19.72, |
| "learning_rate": 2.0331666352141914e-05, |
| "loss": 0.2244, |
| "step": 35170 |
| }, |
| { |
| "epoch": 19.73, |
| "learning_rate": 2.032883562936403e-05, |
| "loss": 0.2276, |
| "step": 35180 |
| }, |
| { |
| "epoch": 19.74, |
| "learning_rate": 2.032600490658615e-05, |
| "loss": 0.23, |
| "step": 35190 |
| }, |
| { |
| "epoch": 19.74, |
| "learning_rate": 2.0323174183808268e-05, |
| "loss": 0.2367, |
| "step": 35200 |
| }, |
| { |
| "epoch": 19.75, |
| "learning_rate": 2.0320343461030384e-05, |
| "loss": 0.2227, |
| "step": 35210 |
| }, |
| { |
| "epoch": 19.75, |
| "learning_rate": 2.03175127382525e-05, |
| "loss": 0.2355, |
| "step": 35220 |
| }, |
| { |
| "epoch": 19.76, |
| "learning_rate": 2.0314682015474616e-05, |
| "loss": 0.2049, |
| "step": 35230 |
| }, |
| { |
| "epoch": 19.76, |
| "learning_rate": 2.0311851292696738e-05, |
| "loss": 0.2285, |
| "step": 35240 |
| }, |
| { |
| "epoch": 19.77, |
| "learning_rate": 2.0309020569918854e-05, |
| "loss": 0.2513, |
| "step": 35250 |
| }, |
| { |
| "epoch": 19.78, |
| "learning_rate": 2.030618984714097e-05, |
| "loss": 0.2231, |
| "step": 35260 |
| }, |
| { |
| "epoch": 19.78, |
| "learning_rate": 2.0303359124363086e-05, |
| "loss": 0.2264, |
| "step": 35270 |
| }, |
| { |
| "epoch": 19.79, |
| "learning_rate": 2.0300528401585208e-05, |
| "loss": 0.2138, |
| "step": 35280 |
| }, |
| { |
| "epoch": 19.79, |
| "learning_rate": 2.0297697678807324e-05, |
| "loss": 0.2326, |
| "step": 35290 |
| }, |
| { |
| "epoch": 19.8, |
| "learning_rate": 2.029486695602944e-05, |
| "loss": 0.2303, |
| "step": 35300 |
| }, |
| { |
| "epoch": 19.8, |
| "learning_rate": 2.0292036233251556e-05, |
| "loss": 0.2205, |
| "step": 35310 |
| }, |
| { |
| "epoch": 19.81, |
| "learning_rate": 2.0289205510473675e-05, |
| "loss": 0.2335, |
| "step": 35320 |
| }, |
| { |
| "epoch": 19.81, |
| "learning_rate": 2.0286374787695794e-05, |
| "loss": 0.2084, |
| "step": 35330 |
| }, |
| { |
| "epoch": 19.82, |
| "learning_rate": 2.028354406491791e-05, |
| "loss": 0.2425, |
| "step": 35340 |
| }, |
| { |
| "epoch": 19.83, |
| "learning_rate": 2.0280713342140026e-05, |
| "loss": 0.249, |
| "step": 35350 |
| }, |
| { |
| "epoch": 19.83, |
| "learning_rate": 2.0277882619362145e-05, |
| "loss": 0.2257, |
| "step": 35360 |
| }, |
| { |
| "epoch": 19.84, |
| "learning_rate": 2.0275051896584264e-05, |
| "loss": 0.2373, |
| "step": 35370 |
| }, |
| { |
| "epoch": 19.84, |
| "learning_rate": 2.027222117380638e-05, |
| "loss": 0.2059, |
| "step": 35380 |
| }, |
| { |
| "epoch": 19.85, |
| "learning_rate": 2.0269390451028496e-05, |
| "loss": 0.231, |
| "step": 35390 |
| }, |
| { |
| "epoch": 19.85, |
| "learning_rate": 2.0266559728250615e-05, |
| "loss": 0.229, |
| "step": 35400 |
| }, |
| { |
| "epoch": 19.86, |
| "learning_rate": 2.026372900547273e-05, |
| "loss": 0.2301, |
| "step": 35410 |
| }, |
| { |
| "epoch": 19.87, |
| "learning_rate": 2.026089828269485e-05, |
| "loss": 0.2361, |
| "step": 35420 |
| }, |
| { |
| "epoch": 19.87, |
| "learning_rate": 2.0258067559916965e-05, |
| "loss": 0.2203, |
| "step": 35430 |
| }, |
| { |
| "epoch": 19.88, |
| "learning_rate": 2.025523683713908e-05, |
| "loss": 0.2267, |
| "step": 35440 |
| }, |
| { |
| "epoch": 19.88, |
| "learning_rate": 2.02524061143612e-05, |
| "loss": 0.2373, |
| "step": 35450 |
| }, |
| { |
| "epoch": 19.89, |
| "learning_rate": 2.024957539158332e-05, |
| "loss": 0.2392, |
| "step": 35460 |
| }, |
| { |
| "epoch": 19.89, |
| "learning_rate": 2.0246744668805435e-05, |
| "loss": 0.225, |
| "step": 35470 |
| }, |
| { |
| "epoch": 19.9, |
| "learning_rate": 2.024391394602755e-05, |
| "loss": 0.2071, |
| "step": 35480 |
| }, |
| { |
| "epoch": 19.9, |
| "learning_rate": 2.024108322324967e-05, |
| "loss": 0.2314, |
| "step": 35490 |
| }, |
| { |
| "epoch": 19.91, |
| "learning_rate": 2.0238252500471786e-05, |
| "loss": 0.2332, |
| "step": 35500 |
| }, |
| { |
| "epoch": 19.92, |
| "learning_rate": 2.0235421777693905e-05, |
| "loss": 0.2253, |
| "step": 35510 |
| }, |
| { |
| "epoch": 19.92, |
| "learning_rate": 2.023259105491602e-05, |
| "loss": 0.2246, |
| "step": 35520 |
| }, |
| { |
| "epoch": 19.93, |
| "learning_rate": 2.022976033213814e-05, |
| "loss": 0.2146, |
| "step": 35530 |
| }, |
| { |
| "epoch": 19.93, |
| "learning_rate": 2.0226929609360256e-05, |
| "loss": 0.2273, |
| "step": 35540 |
| }, |
| { |
| "epoch": 19.94, |
| "learning_rate": 2.0224098886582375e-05, |
| "loss": 0.2264, |
| "step": 35550 |
| }, |
| { |
| "epoch": 19.94, |
| "learning_rate": 2.022126816380449e-05, |
| "loss": 0.2476, |
| "step": 35560 |
| }, |
| { |
| "epoch": 19.95, |
| "learning_rate": 2.021843744102661e-05, |
| "loss": 0.225, |
| "step": 35570 |
| }, |
| { |
| "epoch": 19.95, |
| "learning_rate": 2.0215606718248726e-05, |
| "loss": 0.2198, |
| "step": 35580 |
| }, |
| { |
| "epoch": 19.96, |
| "learning_rate": 2.0212775995470842e-05, |
| "loss": 0.232, |
| "step": 35590 |
| }, |
| { |
| "epoch": 19.97, |
| "learning_rate": 2.020994527269296e-05, |
| "loss": 0.2294, |
| "step": 35600 |
| }, |
| { |
| "epoch": 19.97, |
| "learning_rate": 2.020711454991508e-05, |
| "loss": 0.2306, |
| "step": 35610 |
| }, |
| { |
| "epoch": 19.98, |
| "learning_rate": 2.0204283827137196e-05, |
| "loss": 0.2459, |
| "step": 35620 |
| }, |
| { |
| "epoch": 19.98, |
| "learning_rate": 2.0201453104359312e-05, |
| "loss": 0.2116, |
| "step": 35630 |
| }, |
| { |
| "epoch": 19.99, |
| "learning_rate": 2.019862238158143e-05, |
| "loss": 0.2301, |
| "step": 35640 |
| }, |
| { |
| "epoch": 19.99, |
| "learning_rate": 2.019579165880355e-05, |
| "loss": 0.2256, |
| "step": 35650 |
| }, |
| { |
| "epoch": 20.0, |
| "learning_rate": 2.0192960936025666e-05, |
| "loss": 0.2168, |
| "step": 35660 |
| }, |
| { |
| "epoch": 20.0, |
| "eval_cer": 0.16280520290551867, |
| "eval_loss": 0.41814926266670227, |
| "eval_runtime": 317.945, |
| "eval_samples_per_second": 16.742, |
| "eval_steps_per_second": 4.186, |
| "eval_wer": 0.22064882174622913, |
| "step": 35660 |
| }, |
| { |
| "epoch": 20.01, |
| "learning_rate": 2.0190130213247782e-05, |
| "loss": 0.2296, |
| "step": 35670 |
| }, |
| { |
| "epoch": 20.01, |
| "learning_rate": 2.0187299490469898e-05, |
| "loss": 0.2159, |
| "step": 35680 |
| }, |
| { |
| "epoch": 20.02, |
| "learning_rate": 2.018446876769202e-05, |
| "loss": 0.2298, |
| "step": 35690 |
| }, |
| { |
| "epoch": 20.02, |
| "learning_rate": 2.0181638044914136e-05, |
| "loss": 0.2073, |
| "step": 35700 |
| }, |
| { |
| "epoch": 20.03, |
| "learning_rate": 2.0178807322136252e-05, |
| "loss": 0.2013, |
| "step": 35710 |
| }, |
| { |
| "epoch": 20.03, |
| "learning_rate": 2.0175976599358368e-05, |
| "loss": 0.2196, |
| "step": 35720 |
| }, |
| { |
| "epoch": 20.04, |
| "learning_rate": 2.017314587658049e-05, |
| "loss": 0.2253, |
| "step": 35730 |
| }, |
| { |
| "epoch": 20.04, |
| "learning_rate": 2.0170315153802606e-05, |
| "loss": 0.2181, |
| "step": 35740 |
| }, |
| { |
| "epoch": 20.05, |
| "learning_rate": 2.0167484431024722e-05, |
| "loss": 0.2201, |
| "step": 35750 |
| }, |
| { |
| "epoch": 20.06, |
| "learning_rate": 2.0164653708246838e-05, |
| "loss": 0.1999, |
| "step": 35760 |
| }, |
| { |
| "epoch": 20.06, |
| "learning_rate": 2.0161822985468957e-05, |
| "loss": 0.2259, |
| "step": 35770 |
| }, |
| { |
| "epoch": 20.07, |
| "learning_rate": 2.0158992262691076e-05, |
| "loss": 0.2243, |
| "step": 35780 |
| }, |
| { |
| "epoch": 20.07, |
| "learning_rate": 2.0156161539913192e-05, |
| "loss": 0.2261, |
| "step": 35790 |
| }, |
| { |
| "epoch": 20.08, |
| "learning_rate": 2.0153330817135308e-05, |
| "loss": 0.2115, |
| "step": 35800 |
| }, |
| { |
| "epoch": 20.08, |
| "learning_rate": 2.0150500094357427e-05, |
| "loss": 0.187, |
| "step": 35810 |
| }, |
| { |
| "epoch": 20.09, |
| "learning_rate": 2.0147669371579546e-05, |
| "loss": 0.2259, |
| "step": 35820 |
| }, |
| { |
| "epoch": 20.1, |
| "learning_rate": 2.0144838648801662e-05, |
| "loss": 0.2452, |
| "step": 35830 |
| }, |
| { |
| "epoch": 20.1, |
| "learning_rate": 2.0142007926023778e-05, |
| "loss": 0.213, |
| "step": 35840 |
| }, |
| { |
| "epoch": 20.11, |
| "learning_rate": 2.0139177203245897e-05, |
| "loss": 0.2162, |
| "step": 35850 |
| }, |
| { |
| "epoch": 20.11, |
| "learning_rate": 2.0136346480468013e-05, |
| "loss": 0.1834, |
| "step": 35860 |
| }, |
| { |
| "epoch": 20.12, |
| "learning_rate": 2.0133515757690132e-05, |
| "loss": 0.2293, |
| "step": 35870 |
| }, |
| { |
| "epoch": 20.12, |
| "learning_rate": 2.0130685034912248e-05, |
| "loss": 0.2295, |
| "step": 35880 |
| }, |
| { |
| "epoch": 20.13, |
| "learning_rate": 2.0127854312134363e-05, |
| "loss": 0.2195, |
| "step": 35890 |
| }, |
| { |
| "epoch": 20.13, |
| "learning_rate": 2.0125023589356483e-05, |
| "loss": 0.215, |
| "step": 35900 |
| }, |
| { |
| "epoch": 20.14, |
| "learning_rate": 2.0122192866578602e-05, |
| "loss": 0.2016, |
| "step": 35910 |
| }, |
| { |
| "epoch": 20.15, |
| "learning_rate": 2.0119362143800718e-05, |
| "loss": 0.2371, |
| "step": 35920 |
| }, |
| { |
| "epoch": 20.15, |
| "learning_rate": 2.0116531421022833e-05, |
| "loss": 0.2262, |
| "step": 35930 |
| }, |
| { |
| "epoch": 20.16, |
| "learning_rate": 2.0113700698244953e-05, |
| "loss": 0.2194, |
| "step": 35940 |
| }, |
| { |
| "epoch": 20.16, |
| "learning_rate": 2.011086997546707e-05, |
| "loss": 0.213, |
| "step": 35950 |
| }, |
| { |
| "epoch": 20.17, |
| "learning_rate": 2.0108039252689188e-05, |
| "loss": 0.2126, |
| "step": 35960 |
| }, |
| { |
| "epoch": 20.17, |
| "learning_rate": 2.0105208529911303e-05, |
| "loss": 0.2247, |
| "step": 35970 |
| }, |
| { |
| "epoch": 20.18, |
| "learning_rate": 2.0102377807133423e-05, |
| "loss": 0.2191, |
| "step": 35980 |
| }, |
| { |
| "epoch": 20.19, |
| "learning_rate": 2.009954708435554e-05, |
| "loss": 0.2313, |
| "step": 35990 |
| }, |
| { |
| "epoch": 20.19, |
| "learning_rate": 2.0096716361577658e-05, |
| "loss": 0.2063, |
| "step": 36000 |
| }, |
| { |
| "epoch": 20.2, |
| "learning_rate": 2.0093885638799773e-05, |
| "loss": 0.195, |
| "step": 36010 |
| }, |
| { |
| "epoch": 20.2, |
| "learning_rate": 2.0091054916021893e-05, |
| "loss": 0.2292, |
| "step": 36020 |
| }, |
| { |
| "epoch": 20.21, |
| "learning_rate": 2.008822419324401e-05, |
| "loss": 0.2194, |
| "step": 36030 |
| }, |
| { |
| "epoch": 20.21, |
| "learning_rate": 2.0085393470466124e-05, |
| "loss": 0.216, |
| "step": 36040 |
| }, |
| { |
| "epoch": 20.22, |
| "learning_rate": 2.0082562747688243e-05, |
| "loss": 0.2275, |
| "step": 36050 |
| }, |
| { |
| "epoch": 20.22, |
| "learning_rate": 2.0079732024910363e-05, |
| "loss": 0.1944, |
| "step": 36060 |
| }, |
| { |
| "epoch": 20.23, |
| "learning_rate": 2.007690130213248e-05, |
| "loss": 0.2282, |
| "step": 36070 |
| }, |
| { |
| "epoch": 20.24, |
| "learning_rate": 2.0074070579354594e-05, |
| "loss": 0.2152, |
| "step": 36080 |
| }, |
| { |
| "epoch": 20.24, |
| "learning_rate": 2.0071239856576713e-05, |
| "loss": 0.2233, |
| "step": 36090 |
| }, |
| { |
| "epoch": 20.25, |
| "learning_rate": 2.0068409133798833e-05, |
| "loss": 0.2102, |
| "step": 36100 |
| }, |
| { |
| "epoch": 20.25, |
| "learning_rate": 2.006557841102095e-05, |
| "loss": 0.1914, |
| "step": 36110 |
| }, |
| { |
| "epoch": 20.26, |
| "learning_rate": 2.0062747688243064e-05, |
| "loss": 0.2414, |
| "step": 36120 |
| }, |
| { |
| "epoch": 20.26, |
| "learning_rate": 2.005991696546518e-05, |
| "loss": 0.2267, |
| "step": 36130 |
| }, |
| { |
| "epoch": 20.27, |
| "learning_rate": 2.0057086242687303e-05, |
| "loss": 0.2208, |
| "step": 36140 |
| }, |
| { |
| "epoch": 20.27, |
| "learning_rate": 2.0054255519909418e-05, |
| "loss": 0.2089, |
| "step": 36150 |
| }, |
| { |
| "epoch": 20.28, |
| "learning_rate": 2.0051424797131534e-05, |
| "loss": 0.2066, |
| "step": 36160 |
| }, |
| { |
| "epoch": 20.29, |
| "learning_rate": 2.004859407435365e-05, |
| "loss": 0.2205, |
| "step": 36170 |
| }, |
| { |
| "epoch": 20.29, |
| "learning_rate": 2.0045763351575772e-05, |
| "loss": 0.2239, |
| "step": 36180 |
| }, |
| { |
| "epoch": 20.3, |
| "learning_rate": 2.0042932628797888e-05, |
| "loss": 0.2344, |
| "step": 36190 |
| }, |
| { |
| "epoch": 20.3, |
| "learning_rate": 2.0040101906020004e-05, |
| "loss": 0.218, |
| "step": 36200 |
| }, |
| { |
| "epoch": 20.31, |
| "learning_rate": 2.003727118324212e-05, |
| "loss": 0.1988, |
| "step": 36210 |
| }, |
| { |
| "epoch": 20.31, |
| "learning_rate": 2.003444046046424e-05, |
| "loss": 0.2307, |
| "step": 36220 |
| }, |
| { |
| "epoch": 20.32, |
| "learning_rate": 2.0031609737686358e-05, |
| "loss": 0.2193, |
| "step": 36230 |
| }, |
| { |
| "epoch": 20.33, |
| "learning_rate": 2.0028779014908474e-05, |
| "loss": 0.2261, |
| "step": 36240 |
| }, |
| { |
| "epoch": 20.33, |
| "learning_rate": 2.002594829213059e-05, |
| "loss": 0.2208, |
| "step": 36250 |
| }, |
| { |
| "epoch": 20.34, |
| "learning_rate": 2.002311756935271e-05, |
| "loss": 0.2059, |
| "step": 36260 |
| }, |
| { |
| "epoch": 20.34, |
| "learning_rate": 2.0020286846574828e-05, |
| "loss": 0.2276, |
| "step": 36270 |
| }, |
| { |
| "epoch": 20.35, |
| "learning_rate": 2.0017456123796944e-05, |
| "loss": 0.2159, |
| "step": 36280 |
| }, |
| { |
| "epoch": 20.35, |
| "learning_rate": 2.001462540101906e-05, |
| "loss": 0.2169, |
| "step": 36290 |
| }, |
| { |
| "epoch": 20.36, |
| "learning_rate": 2.0011794678241176e-05, |
| "loss": 0.2229, |
| "step": 36300 |
| }, |
| { |
| "epoch": 20.36, |
| "learning_rate": 2.0008963955463295e-05, |
| "loss": 0.2063, |
| "step": 36310 |
| }, |
| { |
| "epoch": 20.37, |
| "learning_rate": 2.0006133232685414e-05, |
| "loss": 0.2304, |
| "step": 36320 |
| }, |
| { |
| "epoch": 20.38, |
| "learning_rate": 2.000330250990753e-05, |
| "loss": 0.2301, |
| "step": 36330 |
| }, |
| { |
| "epoch": 20.38, |
| "learning_rate": 2.0000471787129646e-05, |
| "loss": 0.227, |
| "step": 36340 |
| }, |
| { |
| "epoch": 20.39, |
| "learning_rate": 1.9997641064351765e-05, |
| "loss": 0.2178, |
| "step": 36350 |
| }, |
| { |
| "epoch": 20.39, |
| "learning_rate": 1.9994810341573884e-05, |
| "loss": 0.223, |
| "step": 36360 |
| }, |
| { |
| "epoch": 20.4, |
| "learning_rate": 1.9991979618796e-05, |
| "loss": 0.2341, |
| "step": 36370 |
| }, |
| { |
| "epoch": 20.4, |
| "learning_rate": 1.9989148896018116e-05, |
| "loss": 0.2232, |
| "step": 36380 |
| }, |
| { |
| "epoch": 20.41, |
| "learning_rate": 1.9986318173240235e-05, |
| "loss": 0.2103, |
| "step": 36390 |
| }, |
| { |
| "epoch": 20.41, |
| "learning_rate": 1.998348745046235e-05, |
| "loss": 0.2248, |
| "step": 36400 |
| }, |
| { |
| "epoch": 20.42, |
| "learning_rate": 1.998065672768447e-05, |
| "loss": 0.1798, |
| "step": 36410 |
| }, |
| { |
| "epoch": 20.43, |
| "learning_rate": 1.9977826004906586e-05, |
| "loss": 0.2219, |
| "step": 36420 |
| }, |
| { |
| "epoch": 20.43, |
| "learning_rate": 1.9974995282128705e-05, |
| "loss": 0.219, |
| "step": 36430 |
| }, |
| { |
| "epoch": 20.44, |
| "learning_rate": 1.997216455935082e-05, |
| "loss": 0.2259, |
| "step": 36440 |
| }, |
| { |
| "epoch": 20.44, |
| "learning_rate": 1.996933383657294e-05, |
| "loss": 0.2273, |
| "step": 36450 |
| }, |
| { |
| "epoch": 20.45, |
| "learning_rate": 1.9966503113795056e-05, |
| "loss": 0.1948, |
| "step": 36460 |
| }, |
| { |
| "epoch": 20.45, |
| "learning_rate": 1.9963672391017175e-05, |
| "loss": 0.2188, |
| "step": 36470 |
| }, |
| { |
| "epoch": 20.46, |
| "learning_rate": 1.996084166823929e-05, |
| "loss": 0.2246, |
| "step": 36480 |
| }, |
| { |
| "epoch": 20.47, |
| "learning_rate": 1.9958010945461406e-05, |
| "loss": 0.2286, |
| "step": 36490 |
| }, |
| { |
| "epoch": 20.47, |
| "learning_rate": 1.9955180222683526e-05, |
| "loss": 0.228, |
| "step": 36500 |
| }, |
| { |
| "epoch": 20.48, |
| "learning_rate": 1.9952349499905645e-05, |
| "loss": 0.2116, |
| "step": 36510 |
| }, |
| { |
| "epoch": 20.48, |
| "learning_rate": 1.994951877712776e-05, |
| "loss": 0.2381, |
| "step": 36520 |
| }, |
| { |
| "epoch": 20.49, |
| "learning_rate": 1.9946688054349876e-05, |
| "loss": 0.2265, |
| "step": 36530 |
| }, |
| { |
| "epoch": 20.49, |
| "learning_rate": 1.9943857331571996e-05, |
| "loss": 0.2164, |
| "step": 36540 |
| }, |
| { |
| "epoch": 20.5, |
| "learning_rate": 1.9941026608794115e-05, |
| "loss": 0.2236, |
| "step": 36550 |
| }, |
| { |
| "epoch": 20.5, |
| "learning_rate": 1.993819588601623e-05, |
| "loss": 0.2098, |
| "step": 36560 |
| }, |
| { |
| "epoch": 20.51, |
| "learning_rate": 1.9935365163238346e-05, |
| "loss": 0.2346, |
| "step": 36570 |
| }, |
| { |
| "epoch": 20.52, |
| "learning_rate": 1.9932534440460462e-05, |
| "loss": 0.2262, |
| "step": 36580 |
| }, |
| { |
| "epoch": 20.52, |
| "learning_rate": 1.9929703717682585e-05, |
| "loss": 0.2096, |
| "step": 36590 |
| }, |
| { |
| "epoch": 20.53, |
| "learning_rate": 1.99268729949047e-05, |
| "loss": 0.2286, |
| "step": 36600 |
| }, |
| { |
| "epoch": 20.53, |
| "learning_rate": 1.9924042272126816e-05, |
| "loss": 0.1836, |
| "step": 36610 |
| }, |
| { |
| "epoch": 20.54, |
| "learning_rate": 1.9921211549348932e-05, |
| "loss": 0.2192, |
| "step": 36620 |
| }, |
| { |
| "epoch": 20.54, |
| "learning_rate": 1.9918380826571055e-05, |
| "loss": 0.232, |
| "step": 36630 |
| }, |
| { |
| "epoch": 20.55, |
| "learning_rate": 1.991555010379317e-05, |
| "loss": 0.2285, |
| "step": 36640 |
| }, |
| { |
| "epoch": 20.56, |
| "learning_rate": 1.9912719381015286e-05, |
| "loss": 0.2187, |
| "step": 36650 |
| }, |
| { |
| "epoch": 20.56, |
| "learning_rate": 1.9909888658237402e-05, |
| "loss": 0.1913, |
| "step": 36660 |
| }, |
| { |
| "epoch": 20.57, |
| "learning_rate": 1.990705793545952e-05, |
| "loss": 0.2312, |
| "step": 36670 |
| }, |
| { |
| "epoch": 20.57, |
| "learning_rate": 1.990422721268164e-05, |
| "loss": 0.2323, |
| "step": 36680 |
| }, |
| { |
| "epoch": 20.58, |
| "learning_rate": 1.9901396489903756e-05, |
| "loss": 0.2201, |
| "step": 36690 |
| }, |
| { |
| "epoch": 20.58, |
| "learning_rate": 1.9898565767125872e-05, |
| "loss": 0.2156, |
| "step": 36700 |
| }, |
| { |
| "epoch": 20.59, |
| "learning_rate": 1.9895735044347988e-05, |
| "loss": 0.1995, |
| "step": 36710 |
| }, |
| { |
| "epoch": 20.59, |
| "learning_rate": 1.989290432157011e-05, |
| "loss": 0.2299, |
| "step": 36720 |
| }, |
| { |
| "epoch": 20.6, |
| "learning_rate": 1.9890073598792226e-05, |
| "loss": 0.2257, |
| "step": 36730 |
| }, |
| { |
| "epoch": 20.61, |
| "learning_rate": 1.9887242876014342e-05, |
| "loss": 0.215, |
| "step": 36740 |
| }, |
| { |
| "epoch": 20.61, |
| "learning_rate": 1.9884412153236458e-05, |
| "loss": 0.2163, |
| "step": 36750 |
| }, |
| { |
| "epoch": 20.62, |
| "learning_rate": 1.9881581430458577e-05, |
| "loss": 0.2072, |
| "step": 36760 |
| }, |
| { |
| "epoch": 20.62, |
| "learning_rate": 1.9878750707680696e-05, |
| "loss": 0.2345, |
| "step": 36770 |
| }, |
| { |
| "epoch": 20.63, |
| "learning_rate": 1.9875919984902812e-05, |
| "loss": 0.2161, |
| "step": 36780 |
| }, |
| { |
| "epoch": 20.63, |
| "learning_rate": 1.9873089262124928e-05, |
| "loss": 0.2194, |
| "step": 36790 |
| }, |
| { |
| "epoch": 20.64, |
| "learning_rate": 1.9870258539347047e-05, |
| "loss": 0.2205, |
| "step": 36800 |
| }, |
| { |
| "epoch": 20.64, |
| "learning_rate": 1.9867427816569166e-05, |
| "loss": 0.1985, |
| "step": 36810 |
| }, |
| { |
| "epoch": 20.65, |
| "learning_rate": 1.9864597093791282e-05, |
| "loss": 0.24, |
| "step": 36820 |
| }, |
| { |
| "epoch": 20.66, |
| "learning_rate": 1.9861766371013398e-05, |
| "loss": 0.2303, |
| "step": 36830 |
| }, |
| { |
| "epoch": 20.66, |
| "learning_rate": 1.9858935648235517e-05, |
| "loss": 0.2239, |
| "step": 36840 |
| }, |
| { |
| "epoch": 20.67, |
| "learning_rate": 1.9856104925457633e-05, |
| "loss": 0.2138, |
| "step": 36850 |
| }, |
| { |
| "epoch": 20.67, |
| "learning_rate": 1.9853274202679752e-05, |
| "loss": 0.1941, |
| "step": 36860 |
| }, |
| { |
| "epoch": 20.68, |
| "learning_rate": 1.9850443479901868e-05, |
| "loss": 0.2285, |
| "step": 36870 |
| }, |
| { |
| "epoch": 20.68, |
| "learning_rate": 1.9847612757123987e-05, |
| "loss": 0.2225, |
| "step": 36880 |
| }, |
| { |
| "epoch": 20.69, |
| "learning_rate": 1.9844782034346103e-05, |
| "loss": 0.23, |
| "step": 36890 |
| }, |
| { |
| "epoch": 20.7, |
| "learning_rate": 1.9841951311568222e-05, |
| "loss": 0.2282, |
| "step": 36900 |
| }, |
| { |
| "epoch": 20.7, |
| "learning_rate": 1.9839120588790338e-05, |
| "loss": 0.1917, |
| "step": 36910 |
| }, |
| { |
| "epoch": 20.71, |
| "learning_rate": 1.9836289866012457e-05, |
| "loss": 0.2296, |
| "step": 36920 |
| }, |
| { |
| "epoch": 20.71, |
| "learning_rate": 1.9833459143234573e-05, |
| "loss": 0.2293, |
| "step": 36930 |
| }, |
| { |
| "epoch": 20.72, |
| "learning_rate": 1.983062842045669e-05, |
| "loss": 0.2224, |
| "step": 36940 |
| }, |
| { |
| "epoch": 20.72, |
| "learning_rate": 1.9827797697678808e-05, |
| "loss": 0.2153, |
| "step": 36950 |
| }, |
| { |
| "epoch": 20.73, |
| "learning_rate": 1.9824966974900927e-05, |
| "loss": 0.2117, |
| "step": 36960 |
| }, |
| { |
| "epoch": 20.73, |
| "learning_rate": 1.9822136252123043e-05, |
| "loss": 0.232, |
| "step": 36970 |
| }, |
| { |
| "epoch": 20.74, |
| "learning_rate": 1.981930552934516e-05, |
| "loss": 0.217, |
| "step": 36980 |
| }, |
| { |
| "epoch": 20.75, |
| "learning_rate": 1.9816474806567278e-05, |
| "loss": 0.2217, |
| "step": 36990 |
| }, |
| { |
| "epoch": 20.75, |
| "learning_rate": 1.9813644083789397e-05, |
| "loss": 0.2186, |
| "step": 37000 |
| }, |
| { |
| "epoch": 20.76, |
| "learning_rate": 1.9810813361011513e-05, |
| "loss": 0.2026, |
| "step": 37010 |
| }, |
| { |
| "epoch": 20.76, |
| "learning_rate": 1.980798263823363e-05, |
| "loss": 0.2337, |
| "step": 37020 |
| }, |
| { |
| "epoch": 20.77, |
| "learning_rate": 1.9805151915455744e-05, |
| "loss": 0.2307, |
| "step": 37030 |
| }, |
| { |
| "epoch": 20.77, |
| "learning_rate": 1.9802321192677867e-05, |
| "loss": 0.2227, |
| "step": 37040 |
| }, |
| { |
| "epoch": 20.78, |
| "learning_rate": 1.9799490469899983e-05, |
| "loss": 0.2046, |
| "step": 37050 |
| }, |
| { |
| "epoch": 20.78, |
| "learning_rate": 1.97966597471221e-05, |
| "loss": 0.1887, |
| "step": 37060 |
| }, |
| { |
| "epoch": 20.79, |
| "learning_rate": 1.9793829024344214e-05, |
| "loss": 0.2291, |
| "step": 37070 |
| }, |
| { |
| "epoch": 20.8, |
| "learning_rate": 1.9790998301566337e-05, |
| "loss": 0.2178, |
| "step": 37080 |
| }, |
| { |
| "epoch": 20.8, |
| "learning_rate": 1.9788167578788453e-05, |
| "loss": 0.2211, |
| "step": 37090 |
| }, |
| { |
| "epoch": 20.81, |
| "learning_rate": 1.978533685601057e-05, |
| "loss": 0.2121, |
| "step": 37100 |
| }, |
| { |
| "epoch": 20.81, |
| "learning_rate": 1.9782506133232684e-05, |
| "loss": 0.1975, |
| "step": 37110 |
| }, |
| { |
| "epoch": 20.82, |
| "learning_rate": 1.97796754104548e-05, |
| "loss": 0.2295, |
| "step": 37120 |
| }, |
| { |
| "epoch": 20.82, |
| "learning_rate": 1.9776844687676923e-05, |
| "loss": 0.2294, |
| "step": 37130 |
| }, |
| { |
| "epoch": 20.83, |
| "learning_rate": 1.977401396489904e-05, |
| "loss": 0.2258, |
| "step": 37140 |
| }, |
| { |
| "epoch": 20.84, |
| "learning_rate": 1.9771183242121154e-05, |
| "loss": 0.2083, |
| "step": 37150 |
| }, |
| { |
| "epoch": 20.84, |
| "learning_rate": 1.976835251934327e-05, |
| "loss": 0.1888, |
| "step": 37160 |
| }, |
| { |
| "epoch": 20.85, |
| "learning_rate": 1.9765521796565393e-05, |
| "loss": 0.2356, |
| "step": 37170 |
| }, |
| { |
| "epoch": 20.85, |
| "learning_rate": 1.976269107378751e-05, |
| "loss": 0.2293, |
| "step": 37180 |
| }, |
| { |
| "epoch": 20.86, |
| "learning_rate": 1.9759860351009624e-05, |
| "loss": 0.2237, |
| "step": 37190 |
| }, |
| { |
| "epoch": 20.86, |
| "learning_rate": 1.975702962823174e-05, |
| "loss": 0.23, |
| "step": 37200 |
| }, |
| { |
| "epoch": 20.87, |
| "learning_rate": 1.975419890545386e-05, |
| "loss": 0.2015, |
| "step": 37210 |
| }, |
| { |
| "epoch": 20.87, |
| "learning_rate": 1.975136818267598e-05, |
| "loss": 0.2271, |
| "step": 37220 |
| }, |
| { |
| "epoch": 20.88, |
| "learning_rate": 1.9748537459898094e-05, |
| "loss": 0.2373, |
| "step": 37230 |
| }, |
| { |
| "epoch": 20.89, |
| "learning_rate": 1.974570673712021e-05, |
| "loss": 0.2182, |
| "step": 37240 |
| }, |
| { |
| "epoch": 20.89, |
| "learning_rate": 1.974287601434233e-05, |
| "loss": 0.218, |
| "step": 37250 |
| }, |
| { |
| "epoch": 20.9, |
| "learning_rate": 1.974004529156445e-05, |
| "loss": 0.2029, |
| "step": 37260 |
| }, |
| { |
| "epoch": 20.9, |
| "learning_rate": 1.9737214568786564e-05, |
| "loss": 0.2335, |
| "step": 37270 |
| }, |
| { |
| "epoch": 20.91, |
| "learning_rate": 1.973438384600868e-05, |
| "loss": 0.2248, |
| "step": 37280 |
| }, |
| { |
| "epoch": 20.91, |
| "learning_rate": 1.97315531232308e-05, |
| "loss": 0.2196, |
| "step": 37290 |
| }, |
| { |
| "epoch": 20.92, |
| "learning_rate": 1.9728722400452915e-05, |
| "loss": 0.227, |
| "step": 37300 |
| }, |
| { |
| "epoch": 20.93, |
| "learning_rate": 1.9725891677675034e-05, |
| "loss": 0.2173, |
| "step": 37310 |
| }, |
| { |
| "epoch": 20.93, |
| "learning_rate": 1.972306095489715e-05, |
| "loss": 0.2351, |
| "step": 37320 |
| }, |
| { |
| "epoch": 20.94, |
| "learning_rate": 1.972023023211927e-05, |
| "loss": 0.2163, |
| "step": 37330 |
| }, |
| { |
| "epoch": 20.94, |
| "learning_rate": 1.9717399509341385e-05, |
| "loss": 0.2334, |
| "step": 37340 |
| }, |
| { |
| "epoch": 20.95, |
| "learning_rate": 1.9714568786563504e-05, |
| "loss": 0.2323, |
| "step": 37350 |
| }, |
| { |
| "epoch": 20.95, |
| "learning_rate": 1.971173806378562e-05, |
| "loss": 0.1924, |
| "step": 37360 |
| }, |
| { |
| "epoch": 20.96, |
| "learning_rate": 1.970890734100774e-05, |
| "loss": 0.2401, |
| "step": 37370 |
| }, |
| { |
| "epoch": 20.96, |
| "learning_rate": 1.9706076618229855e-05, |
| "loss": 0.2237, |
| "step": 37380 |
| }, |
| { |
| "epoch": 20.97, |
| "learning_rate": 1.970324589545197e-05, |
| "loss": 0.2268, |
| "step": 37390 |
| }, |
| { |
| "epoch": 20.98, |
| "learning_rate": 1.970041517267409e-05, |
| "loss": 0.2098, |
| "step": 37400 |
| }, |
| { |
| "epoch": 20.98, |
| "learning_rate": 1.969758444989621e-05, |
| "loss": 0.2112, |
| "step": 37410 |
| }, |
| { |
| "epoch": 20.99, |
| "learning_rate": 1.9694753727118325e-05, |
| "loss": 0.2401, |
| "step": 37420 |
| }, |
| { |
| "epoch": 20.99, |
| "learning_rate": 1.969192300434044e-05, |
| "loss": 0.2288, |
| "step": 37430 |
| }, |
| { |
| "epoch": 21.0, |
| "learning_rate": 1.968909228156256e-05, |
| "loss": 0.2314, |
| "step": 37440 |
| }, |
| { |
| "epoch": 21.0, |
| "eval_cer": 0.1619837794389817, |
| "eval_loss": 0.4210527241230011, |
| "eval_runtime": 317.5775, |
| "eval_samples_per_second": 16.761, |
| "eval_steps_per_second": 4.191, |
| "eval_wer": 0.22019397871965632, |
| "step": 37443 |
| }, |
| { |
| "epoch": 21.0, |
| "learning_rate": 1.968626155878468e-05, |
| "loss": 0.2242, |
| "step": 37450 |
| }, |
| { |
| "epoch": 21.01, |
| "learning_rate": 1.9683430836006795e-05, |
| "loss": 0.2209, |
| "step": 37460 |
| }, |
| { |
| "epoch": 21.02, |
| "learning_rate": 1.968060011322891e-05, |
| "loss": 0.1989, |
| "step": 37470 |
| }, |
| { |
| "epoch": 21.02, |
| "learning_rate": 1.9677769390451026e-05, |
| "loss": 0.2062, |
| "step": 37480 |
| }, |
| { |
| "epoch": 21.03, |
| "learning_rate": 1.967493866767315e-05, |
| "loss": 0.202, |
| "step": 37490 |
| }, |
| { |
| "epoch": 21.03, |
| "learning_rate": 1.9672107944895265e-05, |
| "loss": 0.2133, |
| "step": 37500 |
| }, |
| { |
| "epoch": 21.04, |
| "learning_rate": 1.966927722211738e-05, |
| "loss": 0.2174, |
| "step": 37510 |
| }, |
| { |
| "epoch": 21.04, |
| "learning_rate": 1.9666446499339496e-05, |
| "loss": 0.2154, |
| "step": 37520 |
| }, |
| { |
| "epoch": 21.05, |
| "learning_rate": 1.9663615776561616e-05, |
| "loss": 0.208, |
| "step": 37530 |
| }, |
| { |
| "epoch": 21.05, |
| "learning_rate": 1.9660785053783735e-05, |
| "loss": 0.2064, |
| "step": 37540 |
| }, |
| { |
| "epoch": 21.06, |
| "learning_rate": 1.965795433100585e-05, |
| "loss": 0.2032, |
| "step": 37550 |
| }, |
| { |
| "epoch": 21.07, |
| "learning_rate": 1.9655123608227966e-05, |
| "loss": 0.2123, |
| "step": 37560 |
| }, |
| { |
| "epoch": 21.07, |
| "learning_rate": 1.9652292885450082e-05, |
| "loss": 0.214, |
| "step": 37570 |
| }, |
| { |
| "epoch": 21.08, |
| "learning_rate": 1.9649462162672205e-05, |
| "loss": 0.2143, |
| "step": 37580 |
| }, |
| { |
| "epoch": 21.08, |
| "learning_rate": 1.964663143989432e-05, |
| "loss": 0.1944, |
| "step": 37590 |
| }, |
| { |
| "epoch": 21.09, |
| "learning_rate": 1.9643800717116436e-05, |
| "loss": 0.2074, |
| "step": 37600 |
| }, |
| { |
| "epoch": 21.09, |
| "learning_rate": 1.9640969994338552e-05, |
| "loss": 0.214, |
| "step": 37610 |
| }, |
| { |
| "epoch": 21.1, |
| "learning_rate": 1.9638139271560675e-05, |
| "loss": 0.2218, |
| "step": 37620 |
| }, |
| { |
| "epoch": 21.1, |
| "learning_rate": 1.963530854878279e-05, |
| "loss": 0.2124, |
| "step": 37630 |
| }, |
| { |
| "epoch": 21.11, |
| "learning_rate": 1.9632477826004906e-05, |
| "loss": 0.2107, |
| "step": 37640 |
| }, |
| { |
| "epoch": 21.12, |
| "learning_rate": 1.9629647103227022e-05, |
| "loss": 0.2093, |
| "step": 37650 |
| }, |
| { |
| "epoch": 21.12, |
| "learning_rate": 1.962681638044914e-05, |
| "loss": 0.222, |
| "step": 37660 |
| }, |
| { |
| "epoch": 21.13, |
| "learning_rate": 1.962398565767126e-05, |
| "loss": 0.2158, |
| "step": 37670 |
| }, |
| { |
| "epoch": 21.13, |
| "learning_rate": 1.9621154934893376e-05, |
| "loss": 0.2176, |
| "step": 37680 |
| }, |
| { |
| "epoch": 21.14, |
| "learning_rate": 1.9618324212115492e-05, |
| "loss": 0.2134, |
| "step": 37690 |
| }, |
| { |
| "epoch": 21.14, |
| "learning_rate": 1.961549348933761e-05, |
| "loss": 0.2064, |
| "step": 37700 |
| }, |
| { |
| "epoch": 21.15, |
| "learning_rate": 1.961266276655973e-05, |
| "loss": 0.2071, |
| "step": 37710 |
| }, |
| { |
| "epoch": 21.16, |
| "learning_rate": 1.9609832043781846e-05, |
| "loss": 0.2054, |
| "step": 37720 |
| }, |
| { |
| "epoch": 21.16, |
| "learning_rate": 1.9607001321003962e-05, |
| "loss": 0.2231, |
| "step": 37730 |
| }, |
| { |
| "epoch": 21.17, |
| "learning_rate": 1.960417059822608e-05, |
| "loss": 0.2056, |
| "step": 37740 |
| }, |
| { |
| "epoch": 21.17, |
| "learning_rate": 1.9601339875448197e-05, |
| "loss": 0.1886, |
| "step": 37750 |
| }, |
| { |
| "epoch": 21.18, |
| "learning_rate": 1.9598509152670316e-05, |
| "loss": 0.2239, |
| "step": 37760 |
| }, |
| { |
| "epoch": 21.18, |
| "learning_rate": 1.9595678429892432e-05, |
| "loss": 0.2192, |
| "step": 37770 |
| }, |
| { |
| "epoch": 21.19, |
| "learning_rate": 1.959284770711455e-05, |
| "loss": 0.2094, |
| "step": 37780 |
| }, |
| { |
| "epoch": 21.19, |
| "learning_rate": 1.9590016984336667e-05, |
| "loss": 0.202, |
| "step": 37790 |
| }, |
| { |
| "epoch": 21.2, |
| "learning_rate": 1.9587186261558786e-05, |
| "loss": 0.2036, |
| "step": 37800 |
| }, |
| { |
| "epoch": 21.21, |
| "learning_rate": 1.9584355538780902e-05, |
| "loss": 0.2271, |
| "step": 37810 |
| }, |
| { |
| "epoch": 21.21, |
| "learning_rate": 1.958152481600302e-05, |
| "loss": 0.2095, |
| "step": 37820 |
| }, |
| { |
| "epoch": 21.22, |
| "learning_rate": 1.9578694093225137e-05, |
| "loss": 0.2067, |
| "step": 37830 |
| }, |
| { |
| "epoch": 21.22, |
| "learning_rate": 1.9575863370447253e-05, |
| "loss": 0.1992, |
| "step": 37840 |
| }, |
| { |
| "epoch": 21.23, |
| "learning_rate": 1.9573032647669372e-05, |
| "loss": 0.205, |
| "step": 37850 |
| }, |
| { |
| "epoch": 21.23, |
| "learning_rate": 1.957020192489149e-05, |
| "loss": 0.2251, |
| "step": 37860 |
| }, |
| { |
| "epoch": 21.24, |
| "learning_rate": 1.9567371202113607e-05, |
| "loss": 0.2073, |
| "step": 37870 |
| }, |
| { |
| "epoch": 21.24, |
| "learning_rate": 1.9564540479335723e-05, |
| "loss": 0.2193, |
| "step": 37880 |
| }, |
| { |
| "epoch": 21.25, |
| "learning_rate": 1.9561709756557842e-05, |
| "loss": 0.212, |
| "step": 37890 |
| }, |
| { |
| "epoch": 21.26, |
| "learning_rate": 1.955887903377996e-05, |
| "loss": 0.223, |
| "step": 37900 |
| }, |
| { |
| "epoch": 21.26, |
| "learning_rate": 1.9556048311002077e-05, |
| "loss": 0.2074, |
| "step": 37910 |
| }, |
| { |
| "epoch": 21.27, |
| "learning_rate": 1.9553217588224193e-05, |
| "loss": 0.2105, |
| "step": 37920 |
| }, |
| { |
| "epoch": 21.27, |
| "learning_rate": 1.955038686544631e-05, |
| "loss": 0.1995, |
| "step": 37930 |
| }, |
| { |
| "epoch": 21.28, |
| "learning_rate": 1.954755614266843e-05, |
| "loss": 0.2078, |
| "step": 37940 |
| }, |
| { |
| "epoch": 21.28, |
| "learning_rate": 1.9544725419890547e-05, |
| "loss": 0.1872, |
| "step": 37950 |
| }, |
| { |
| "epoch": 21.29, |
| "learning_rate": 1.9541894697112663e-05, |
| "loss": 0.2209, |
| "step": 37960 |
| }, |
| { |
| "epoch": 21.3, |
| "learning_rate": 1.953906397433478e-05, |
| "loss": 0.2343, |
| "step": 37970 |
| }, |
| { |
| "epoch": 21.3, |
| "learning_rate": 1.9536233251556898e-05, |
| "loss": 0.2034, |
| "step": 37980 |
| }, |
| { |
| "epoch": 21.31, |
| "learning_rate": 1.9533402528779017e-05, |
| "loss": 0.1984, |
| "step": 37990 |
| }, |
| { |
| "epoch": 21.31, |
| "learning_rate": 1.9530571806001133e-05, |
| "loss": 0.2232, |
| "step": 38000 |
| }, |
| { |
| "epoch": 21.32, |
| "learning_rate": 1.952774108322325e-05, |
| "loss": 0.2205, |
| "step": 38010 |
| }, |
| { |
| "epoch": 21.32, |
| "learning_rate": 1.9524910360445368e-05, |
| "loss": 0.2137, |
| "step": 38020 |
| }, |
| { |
| "epoch": 21.33, |
| "learning_rate": 1.9522079637667487e-05, |
| "loss": 0.2082, |
| "step": 38030 |
| }, |
| { |
| "epoch": 21.33, |
| "learning_rate": 1.9519248914889603e-05, |
| "loss": 0.202, |
| "step": 38040 |
| }, |
| { |
| "epoch": 21.34, |
| "learning_rate": 1.951641819211172e-05, |
| "loss": 0.2002, |
| "step": 38050 |
| }, |
| { |
| "epoch": 21.35, |
| "learning_rate": 1.9513587469333834e-05, |
| "loss": 0.2183, |
| "step": 38060 |
| }, |
| { |
| "epoch": 21.35, |
| "learning_rate": 1.9510756746555957e-05, |
| "loss": 0.2224, |
| "step": 38070 |
| }, |
| { |
| "epoch": 21.36, |
| "learning_rate": 1.9507926023778073e-05, |
| "loss": 0.2146, |
| "step": 38080 |
| }, |
| { |
| "epoch": 21.36, |
| "learning_rate": 1.950509530100019e-05, |
| "loss": 0.2074, |
| "step": 38090 |
| }, |
| { |
| "epoch": 21.37, |
| "learning_rate": 1.9502264578222304e-05, |
| "loss": 0.2022, |
| "step": 38100 |
| }, |
| { |
| "epoch": 21.37, |
| "learning_rate": 1.9499433855444423e-05, |
| "loss": 0.2201, |
| "step": 38110 |
| }, |
| { |
| "epoch": 21.38, |
| "learning_rate": 1.9496603132666543e-05, |
| "loss": 0.2126, |
| "step": 38120 |
| }, |
| { |
| "epoch": 21.39, |
| "learning_rate": 1.949377240988866e-05, |
| "loss": 0.2167, |
| "step": 38130 |
| }, |
| { |
| "epoch": 21.39, |
| "learning_rate": 1.9490941687110774e-05, |
| "loss": 0.2016, |
| "step": 38140 |
| }, |
| { |
| "epoch": 21.4, |
| "learning_rate": 1.9488110964332893e-05, |
| "loss": 0.2057, |
| "step": 38150 |
| }, |
| { |
| "epoch": 21.4, |
| "learning_rate": 1.9485280241555013e-05, |
| "loss": 0.2118, |
| "step": 38160 |
| }, |
| { |
| "epoch": 21.41, |
| "learning_rate": 1.948244951877713e-05, |
| "loss": 0.212, |
| "step": 38170 |
| }, |
| { |
| "epoch": 21.41, |
| "learning_rate": 1.9479618795999244e-05, |
| "loss": 0.2099, |
| "step": 38180 |
| }, |
| { |
| "epoch": 21.42, |
| "learning_rate": 1.9476788073221363e-05, |
| "loss": 0.2032, |
| "step": 38190 |
| }, |
| { |
| "epoch": 21.42, |
| "learning_rate": 1.947395735044348e-05, |
| "loss": 0.2125, |
| "step": 38200 |
| }, |
| { |
| "epoch": 21.43, |
| "learning_rate": 1.94711266276656e-05, |
| "loss": 0.2295, |
| "step": 38210 |
| }, |
| { |
| "epoch": 21.44, |
| "learning_rate": 1.9468295904887714e-05, |
| "loss": 0.2289, |
| "step": 38220 |
| }, |
| { |
| "epoch": 21.44, |
| "learning_rate": 1.9465465182109833e-05, |
| "loss": 0.2188, |
| "step": 38230 |
| }, |
| { |
| "epoch": 21.45, |
| "learning_rate": 1.946263445933195e-05, |
| "loss": 0.2024, |
| "step": 38240 |
| }, |
| { |
| "epoch": 21.45, |
| "learning_rate": 1.945980373655407e-05, |
| "loss": 0.2148, |
| "step": 38250 |
| }, |
| { |
| "epoch": 21.46, |
| "learning_rate": 1.9456973013776184e-05, |
| "loss": 0.2228, |
| "step": 38260 |
| }, |
| { |
| "epoch": 21.46, |
| "learning_rate": 1.9454142290998303e-05, |
| "loss": 0.21, |
| "step": 38270 |
| }, |
| { |
| "epoch": 21.47, |
| "learning_rate": 1.945131156822042e-05, |
| "loss": 0.2123, |
| "step": 38280 |
| }, |
| { |
| "epoch": 21.47, |
| "learning_rate": 1.9448480845442535e-05, |
| "loss": 0.2177, |
| "step": 38290 |
| }, |
| { |
| "epoch": 21.48, |
| "learning_rate": 1.9445650122664654e-05, |
| "loss": 0.2052, |
| "step": 38300 |
| }, |
| { |
| "epoch": 21.49, |
| "learning_rate": 1.9442819399886773e-05, |
| "loss": 0.2263, |
| "step": 38310 |
| }, |
| { |
| "epoch": 21.49, |
| "learning_rate": 1.943998867710889e-05, |
| "loss": 0.2281, |
| "step": 38320 |
| }, |
| { |
| "epoch": 21.5, |
| "learning_rate": 1.9437157954331005e-05, |
| "loss": 0.2089, |
| "step": 38330 |
| }, |
| { |
| "epoch": 21.5, |
| "learning_rate": 1.9434327231553124e-05, |
| "loss": 0.205, |
| "step": 38340 |
| }, |
| { |
| "epoch": 21.51, |
| "learning_rate": 1.9431496508775243e-05, |
| "loss": 0.2009, |
| "step": 38350 |
| }, |
| { |
| "epoch": 21.51, |
| "learning_rate": 1.942866578599736e-05, |
| "loss": 0.2308, |
| "step": 38360 |
| }, |
| { |
| "epoch": 21.52, |
| "learning_rate": 1.9425835063219475e-05, |
| "loss": 0.2018, |
| "step": 38370 |
| }, |
| { |
| "epoch": 21.53, |
| "learning_rate": 1.9423004340441594e-05, |
| "loss": 0.2, |
| "step": 38380 |
| }, |
| { |
| "epoch": 21.53, |
| "learning_rate": 1.942017361766371e-05, |
| "loss": 0.211, |
| "step": 38390 |
| }, |
| { |
| "epoch": 21.54, |
| "learning_rate": 1.941734289488583e-05, |
| "loss": 0.2133, |
| "step": 38400 |
| }, |
| { |
| "epoch": 21.54, |
| "learning_rate": 1.9414512172107945e-05, |
| "loss": 0.2246, |
| "step": 38410 |
| }, |
| { |
| "epoch": 21.55, |
| "learning_rate": 1.941168144933006e-05, |
| "loss": 0.2245, |
| "step": 38420 |
| }, |
| { |
| "epoch": 21.55, |
| "learning_rate": 1.940885072655218e-05, |
| "loss": 0.2315, |
| "step": 38430 |
| }, |
| { |
| "epoch": 21.56, |
| "learning_rate": 1.94060200037743e-05, |
| "loss": 0.1989, |
| "step": 38440 |
| }, |
| { |
| "epoch": 21.56, |
| "learning_rate": 1.9403189280996415e-05, |
| "loss": 0.1998, |
| "step": 38450 |
| }, |
| { |
| "epoch": 21.57, |
| "learning_rate": 1.940035855821853e-05, |
| "loss": 0.221, |
| "step": 38460 |
| }, |
| { |
| "epoch": 21.58, |
| "learning_rate": 1.939752783544065e-05, |
| "loss": 0.2194, |
| "step": 38470 |
| }, |
| { |
| "epoch": 21.58, |
| "learning_rate": 1.939469711266277e-05, |
| "loss": 0.2154, |
| "step": 38480 |
| }, |
| { |
| "epoch": 21.59, |
| "learning_rate": 1.9391866389884885e-05, |
| "loss": 0.2198, |
| "step": 38490 |
| }, |
| { |
| "epoch": 21.59, |
| "learning_rate": 1.9389035667107e-05, |
| "loss": 0.2025, |
| "step": 38500 |
| }, |
| { |
| "epoch": 21.6, |
| "learning_rate": 1.9386204944329116e-05, |
| "loss": 0.2095, |
| "step": 38510 |
| }, |
| { |
| "epoch": 21.6, |
| "learning_rate": 1.938337422155124e-05, |
| "loss": 0.2084, |
| "step": 38520 |
| }, |
| { |
| "epoch": 21.61, |
| "learning_rate": 1.9380543498773355e-05, |
| "loss": 0.2094, |
| "step": 38530 |
| }, |
| { |
| "epoch": 21.61, |
| "learning_rate": 1.937771277599547e-05, |
| "loss": 0.2055, |
| "step": 38540 |
| }, |
| { |
| "epoch": 21.62, |
| "learning_rate": 1.9374882053217586e-05, |
| "loss": 0.2014, |
| "step": 38550 |
| }, |
| { |
| "epoch": 21.63, |
| "learning_rate": 1.9372051330439706e-05, |
| "loss": 0.2232, |
| "step": 38560 |
| }, |
| { |
| "epoch": 21.63, |
| "learning_rate": 1.9369220607661825e-05, |
| "loss": 0.2094, |
| "step": 38570 |
| }, |
| { |
| "epoch": 21.64, |
| "learning_rate": 1.936638988488394e-05, |
| "loss": 0.2081, |
| "step": 38580 |
| }, |
| { |
| "epoch": 21.64, |
| "learning_rate": 1.9363559162106056e-05, |
| "loss": 0.2042, |
| "step": 38590 |
| }, |
| { |
| "epoch": 21.65, |
| "learning_rate": 1.9360728439328176e-05, |
| "loss": 0.2189, |
| "step": 38600 |
| }, |
| { |
| "epoch": 21.65, |
| "learning_rate": 1.9357897716550295e-05, |
| "loss": 0.2134, |
| "step": 38610 |
| }, |
| { |
| "epoch": 21.66, |
| "learning_rate": 1.935506699377241e-05, |
| "loss": 0.2227, |
| "step": 38620 |
| }, |
| { |
| "epoch": 21.67, |
| "learning_rate": 1.9352236270994526e-05, |
| "loss": 0.2217, |
| "step": 38630 |
| }, |
| { |
| "epoch": 21.67, |
| "learning_rate": 1.9349405548216646e-05, |
| "loss": 0.2179, |
| "step": 38640 |
| }, |
| { |
| "epoch": 21.68, |
| "learning_rate": 1.9346574825438765e-05, |
| "loss": 0.1989, |
| "step": 38650 |
| }, |
| { |
| "epoch": 21.68, |
| "learning_rate": 1.934374410266088e-05, |
| "loss": 0.2264, |
| "step": 38660 |
| }, |
| { |
| "epoch": 21.69, |
| "learning_rate": 1.9340913379882996e-05, |
| "loss": 0.2182, |
| "step": 38670 |
| }, |
| { |
| "epoch": 21.69, |
| "learning_rate": 1.9338082657105116e-05, |
| "loss": 0.2136, |
| "step": 38680 |
| }, |
| { |
| "epoch": 21.7, |
| "learning_rate": 1.933525193432723e-05, |
| "loss": 0.215, |
| "step": 38690 |
| }, |
| { |
| "epoch": 21.7, |
| "learning_rate": 1.933242121154935e-05, |
| "loss": 0.2098, |
| "step": 38700 |
| }, |
| { |
| "epoch": 21.71, |
| "learning_rate": 1.9329590488771466e-05, |
| "loss": 0.2237, |
| "step": 38710 |
| }, |
| { |
| "epoch": 21.72, |
| "learning_rate": 1.9326759765993586e-05, |
| "loss": 0.2147, |
| "step": 38720 |
| }, |
| { |
| "epoch": 21.72, |
| "learning_rate": 1.93239290432157e-05, |
| "loss": 0.228, |
| "step": 38730 |
| }, |
| { |
| "epoch": 21.73, |
| "learning_rate": 1.932109832043782e-05, |
| "loss": 0.2008, |
| "step": 38740 |
| }, |
| { |
| "epoch": 21.73, |
| "learning_rate": 1.9318267597659936e-05, |
| "loss": 0.211, |
| "step": 38750 |
| }, |
| { |
| "epoch": 21.74, |
| "learning_rate": 1.9315436874882056e-05, |
| "loss": 0.2262, |
| "step": 38760 |
| }, |
| { |
| "epoch": 21.74, |
| "learning_rate": 1.931260615210417e-05, |
| "loss": 0.2211, |
| "step": 38770 |
| }, |
| { |
| "epoch": 21.75, |
| "learning_rate": 1.9309775429326287e-05, |
| "loss": 0.2094, |
| "step": 38780 |
| }, |
| { |
| "epoch": 21.76, |
| "learning_rate": 1.9306944706548406e-05, |
| "loss": 0.2061, |
| "step": 38790 |
| }, |
| { |
| "epoch": 21.76, |
| "learning_rate": 1.9304113983770522e-05, |
| "loss": 0.2151, |
| "step": 38800 |
| }, |
| { |
| "epoch": 21.77, |
| "learning_rate": 1.930128326099264e-05, |
| "loss": 0.2257, |
| "step": 38810 |
| }, |
| { |
| "epoch": 21.77, |
| "learning_rate": 1.9298452538214757e-05, |
| "loss": 0.2161, |
| "step": 38820 |
| }, |
| { |
| "epoch": 21.78, |
| "learning_rate": 1.9295621815436876e-05, |
| "loss": 0.2166, |
| "step": 38830 |
| }, |
| { |
| "epoch": 21.78, |
| "learning_rate": 1.9292791092658992e-05, |
| "loss": 0.1931, |
| "step": 38840 |
| }, |
| { |
| "epoch": 21.79, |
| "learning_rate": 1.928996036988111e-05, |
| "loss": 0.2138, |
| "step": 38850 |
| }, |
| { |
| "epoch": 21.79, |
| "learning_rate": 1.9287129647103227e-05, |
| "loss": 0.2231, |
| "step": 38860 |
| }, |
| { |
| "epoch": 21.8, |
| "learning_rate": 1.9284298924325343e-05, |
| "loss": 0.217, |
| "step": 38870 |
| }, |
| { |
| "epoch": 21.81, |
| "learning_rate": 1.9281468201547462e-05, |
| "loss": 0.2033, |
| "step": 38880 |
| }, |
| { |
| "epoch": 21.81, |
| "learning_rate": 1.927863747876958e-05, |
| "loss": 0.2046, |
| "step": 38890 |
| }, |
| { |
| "epoch": 21.82, |
| "learning_rate": 1.9275806755991697e-05, |
| "loss": 0.2066, |
| "step": 38900 |
| }, |
| { |
| "epoch": 21.82, |
| "learning_rate": 1.9272976033213813e-05, |
| "loss": 0.2187, |
| "step": 38910 |
| }, |
| { |
| "epoch": 21.83, |
| "learning_rate": 1.9270145310435932e-05, |
| "loss": 0.2227, |
| "step": 38920 |
| }, |
| { |
| "epoch": 21.83, |
| "learning_rate": 1.926731458765805e-05, |
| "loss": 0.2283, |
| "step": 38930 |
| }, |
| { |
| "epoch": 21.84, |
| "learning_rate": 1.9264483864880167e-05, |
| "loss": 0.2153, |
| "step": 38940 |
| }, |
| { |
| "epoch": 21.84, |
| "learning_rate": 1.9261653142102283e-05, |
| "loss": 0.2039, |
| "step": 38950 |
| }, |
| { |
| "epoch": 21.85, |
| "learning_rate": 1.92588224193244e-05, |
| "loss": 0.2114, |
| "step": 38960 |
| }, |
| { |
| "epoch": 21.86, |
| "learning_rate": 1.925599169654652e-05, |
| "loss": 0.2186, |
| "step": 38970 |
| }, |
| { |
| "epoch": 21.86, |
| "learning_rate": 1.9253160973768637e-05, |
| "loss": 0.2103, |
| "step": 38980 |
| }, |
| { |
| "epoch": 21.87, |
| "learning_rate": 1.9250330250990753e-05, |
| "loss": 0.2107, |
| "step": 38990 |
| }, |
| { |
| "epoch": 21.87, |
| "learning_rate": 1.924749952821287e-05, |
| "loss": 0.21, |
| "step": 39000 |
| }, |
| { |
| "epoch": 21.88, |
| "learning_rate": 1.924466880543499e-05, |
| "loss": 0.2313, |
| "step": 39010 |
| }, |
| { |
| "epoch": 21.88, |
| "learning_rate": 1.9241838082657107e-05, |
| "loss": 0.224, |
| "step": 39020 |
| }, |
| { |
| "epoch": 21.89, |
| "learning_rate": 1.9239007359879223e-05, |
| "loss": 0.2077, |
| "step": 39030 |
| }, |
| { |
| "epoch": 21.9, |
| "learning_rate": 1.923617663710134e-05, |
| "loss": 0.2254, |
| "step": 39040 |
| }, |
| { |
| "epoch": 21.9, |
| "learning_rate": 1.9233345914323458e-05, |
| "loss": 0.2032, |
| "step": 39050 |
| }, |
| { |
| "epoch": 21.91, |
| "learning_rate": 1.9230515191545577e-05, |
| "loss": 0.2158, |
| "step": 39060 |
| }, |
| { |
| "epoch": 21.91, |
| "learning_rate": 1.9227684468767693e-05, |
| "loss": 0.2174, |
| "step": 39070 |
| }, |
| { |
| "epoch": 21.92, |
| "learning_rate": 1.922485374598981e-05, |
| "loss": 0.2015, |
| "step": 39080 |
| }, |
| { |
| "epoch": 21.92, |
| "learning_rate": 1.9222023023211928e-05, |
| "loss": 0.217, |
| "step": 39090 |
| }, |
| { |
| "epoch": 21.93, |
| "learning_rate": 1.9219192300434047e-05, |
| "loss": 0.2015, |
| "step": 39100 |
| }, |
| { |
| "epoch": 21.93, |
| "learning_rate": 1.9216361577656163e-05, |
| "loss": 0.2271, |
| "step": 39110 |
| }, |
| { |
| "epoch": 21.94, |
| "learning_rate": 1.921353085487828e-05, |
| "loss": 0.2198, |
| "step": 39120 |
| }, |
| { |
| "epoch": 21.95, |
| "learning_rate": 1.9210700132100398e-05, |
| "loss": 0.2107, |
| "step": 39130 |
| }, |
| { |
| "epoch": 21.95, |
| "learning_rate": 1.9207869409322514e-05, |
| "loss": 0.1933, |
| "step": 39140 |
| }, |
| { |
| "epoch": 21.96, |
| "learning_rate": 1.9205038686544633e-05, |
| "loss": 0.2232, |
| "step": 39150 |
| }, |
| { |
| "epoch": 21.96, |
| "learning_rate": 1.920220796376675e-05, |
| "loss": 0.2332, |
| "step": 39160 |
| }, |
| { |
| "epoch": 21.97, |
| "learning_rate": 1.9199377240988868e-05, |
| "loss": 0.2127, |
| "step": 39170 |
| }, |
| { |
| "epoch": 21.97, |
| "learning_rate": 1.9196546518210984e-05, |
| "loss": 0.2185, |
| "step": 39180 |
| }, |
| { |
| "epoch": 21.98, |
| "learning_rate": 1.9193715795433103e-05, |
| "loss": 0.213, |
| "step": 39190 |
| }, |
| { |
| "epoch": 21.99, |
| "learning_rate": 1.919088507265522e-05, |
| "loss": 0.1931, |
| "step": 39200 |
| }, |
| { |
| "epoch": 21.99, |
| "learning_rate": 1.9188054349877334e-05, |
| "loss": 0.2345, |
| "step": 39210 |
| }, |
| { |
| "epoch": 22.0, |
| "learning_rate": 1.9185223627099454e-05, |
| "loss": 0.2186, |
| "step": 39220 |
| }, |
| { |
| "epoch": 22.0, |
| "eval_cer": 0.16133035331418843, |
| "eval_loss": 0.4230673015117645, |
| "eval_runtime": 317.8016, |
| "eval_samples_per_second": 16.749, |
| "eval_steps_per_second": 4.188, |
| "eval_wer": 0.21932898822403515, |
| "step": 39226 |
| } |
| ], |
| "max_steps": 106980, |
| "num_train_epochs": 60, |
| "total_flos": 1.264165507015986e+19, |
| "trial_name": null, |
| "trial_params": null |
| } |