{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9995796553173603,
  "global_step": 1189,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.2645, "step": 1},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 8.7817, "step": 2},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.3407, "step": 3},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.1586, "step": 4},
    {"epoch": 0.0, "learning_rate": 6.000000000000001e-08, "loss": 9.0122, "step": 5},
    {"epoch": 0.01, "learning_rate": 6.000000000000001e-08, "loss": 9.1074, "step": 6},
    {"epoch": 0.01, "learning_rate": 1.2000000000000002e-07, "loss": 8.7889, "step": 7},
    {"epoch": 0.01, "learning_rate": 1.2000000000000002e-07, "loss": 8.9644, "step": 8},
    {"epoch": 0.01, "learning_rate": 1.8e-07, "loss": 9.2456, "step": 9},
    {"epoch": 0.01, "learning_rate": 2.4000000000000003e-07, "loss": 9.0984, "step": 10},
    {"epoch": 0.01, "learning_rate": 3.0000000000000004e-07, "loss": 8.8478, "step": 11},
    {"epoch": 0.01, "learning_rate": 3.6e-07, "loss": 9.243, "step": 12},
    {"epoch": 0.01, "learning_rate": 4.2e-07, "loss": 8.9251, "step": 13},
    {"epoch": 0.01, "learning_rate": 4.800000000000001e-07, "loss": 8.604, "step": 14},
    {"epoch": 0.01, "learning_rate": 5.4e-07, "loss": 8.8964, "step": 15},
    {"epoch": 0.01, "learning_rate": 6.000000000000001e-07, "loss": 8.5091, "step": 16},
    {"epoch": 0.01, "learning_rate": 6.599999999999999e-07, "loss": 8.8942, "step": 17},
    {"epoch": 0.02, "learning_rate": 7.2e-07, "loss": 9.0732, "step": 18},
    {"epoch": 0.02, "learning_rate": 7.799999999999999e-07, "loss": 8.4959, "step": 19},
    {"epoch": 0.02, "learning_rate": 8.4e-07, "loss": 8.6661, "step": 20},
    {"epoch": 0.02, "learning_rate": 9e-07, "loss": 8.3645, "step": 21},
    {"epoch": 0.02, "learning_rate": 9.600000000000001e-07, "loss": 8.268, "step": 22},
    {"epoch": 0.02, "learning_rate": 1.0200000000000002e-06, "loss": 8.4268, "step": 23},
    {"epoch": 0.02, "learning_rate": 1.08e-06, "loss": 8.5528, "step": 24},
    {"epoch": 0.02, "learning_rate": 1.14e-06, "loss": 8.3211, "step": 25},
    {"epoch": 0.02, "learning_rate": 1.2000000000000002e-06, "loss": 8.2367, "step": 26},
    {"epoch": 0.02, "learning_rate": 1.26e-06, "loss": 8.2729, "step": 27},
    {"epoch": 0.02, "learning_rate": 1.3199999999999999e-06, "loss": 8.0739, "step": 28},
    {"epoch": 0.02, "learning_rate": 1.38e-06, "loss": 8.0769, "step": 29},
    {"epoch": 0.03, "learning_rate": 1.44e-06, "loss": 8.1845, "step": 30},
    {"epoch": 0.03, "learning_rate": 1.5e-06, "loss": 8.0313, "step": 31},
    {"epoch": 0.03, "learning_rate": 1.5599999999999999e-06, "loss": 7.8045, "step": 32},
    {"epoch": 0.03, "learning_rate": 1.62e-06, "loss": 7.9822, "step": 33},
    {"epoch": 0.03, "learning_rate": 1.68e-06, "loss": 7.6708, "step": 34},
    {"epoch": 0.03, "learning_rate": 1.74e-06, "loss": 8.2451, "step": 35},
    {"epoch": 0.03, "learning_rate": 1.8e-06, "loss": 7.9231, "step": 36},
    {"epoch": 0.03, "learning_rate": 1.86e-06, "loss": 7.8853, "step": 37},
    {"epoch": 0.03, "learning_rate": 1.9200000000000003e-06, "loss": 7.7427, "step": 38},
    {"epoch": 0.03, "learning_rate": 1.98e-06, "loss": 7.9188, "step": 39},
    {"epoch": 0.03, "learning_rate": 2.0400000000000004e-06, "loss": 7.7559, "step": 40},
    {"epoch": 0.03, "learning_rate": 2.1000000000000002e-06, "loss": 7.6086, "step": 41},
    {"epoch": 0.04, "learning_rate": 2.16e-06, "loss": 7.7548, "step": 42},
    {"epoch": 0.04, "learning_rate": 2.22e-06, "loss": 7.6587, "step": 43},
    {"epoch": 0.04, "learning_rate": 2.28e-06, "loss": 7.6761, "step": 44},
    {"epoch": 0.04, "learning_rate": 2.34e-06, "loss": 7.8011, "step": 45},
    {"epoch": 0.04, "learning_rate": 2.4000000000000003e-06, "loss": 7.8535, "step": 46},
    {"epoch": 0.04, "learning_rate": 2.46e-06, "loss": 7.8245, "step": 47},
    {"epoch": 0.04, "learning_rate": 2.52e-06, "loss": 7.9046, "step": 48},
    {"epoch": 0.04, "learning_rate": 2.58e-06, "loss": 7.801, "step": 49},
    {"epoch": 0.04, "learning_rate": 2.6399999999999997e-06, "loss": 7.9401, "step": 50},
    {"epoch": 0.04, "learning_rate": 2.7e-06, "loss": 7.5953, "step": 51},
    {"epoch": 0.04, "learning_rate": 2.76e-06, "loss": 7.4365, "step": 52},
    {"epoch": 0.04, "learning_rate": 2.82e-06, "loss": 7.32, "step": 53},
    {"epoch": 0.05, "learning_rate": 2.88e-06, "loss": 7.389, "step": 54},
    {"epoch": 0.05, "learning_rate": 2.9400000000000002e-06, "loss": 7.488, "step": 55},
    {"epoch": 0.05, "learning_rate": 3e-06, "loss": 7.2294, "step": 56},
    {"epoch": 0.05, "learning_rate": 3.06e-06, "loss": 7.6729, "step": 57},
    {"epoch": 0.05, "learning_rate": 3.1199999999999998e-06, "loss": 7.2778, "step": 58},
    {"epoch": 0.05, "learning_rate": 3.18e-06, "loss": 7.2242, "step": 59},
    {"epoch": 0.05, "learning_rate": 3.24e-06, "loss": 7.3987, "step": 60},
    {"epoch": 0.05, "learning_rate": 3.3e-06, "loss": 7.2189, "step": 61},
    {"epoch": 0.05, "learning_rate": 3.36e-06, "loss": 7.2479, "step": 62},
    {"epoch": 0.05, "learning_rate": 3.4200000000000003e-06, "loss": 7.3781, "step": 63},
    {"epoch": 0.05, "learning_rate": 3.48e-06, "loss": 7.3324, "step": 64},
    {"epoch": 0.05, "learning_rate": 3.54e-06, "loss": 7.2571, "step": 65},
    {"epoch": 0.06, "learning_rate": 3.6e-06, "loss": 7.3767, "step": 66},
    {"epoch": 0.06, "learning_rate": 3.66e-06, "loss": 7.3103, "step": 67},
    {"epoch": 0.06, "learning_rate": 3.72e-06, "loss": 7.2252, "step": 68},
    {"epoch": 0.06, "learning_rate": 3.7800000000000002e-06, "loss": 7.1928, "step": 69},
    {"epoch": 0.06, "learning_rate": 3.8400000000000005e-06, "loss": 7.22, "step": 70},
    {"epoch": 0.06, "learning_rate": 3.9e-06, "loss": 7.3639, "step": 71},
    {"epoch": 0.06, "learning_rate": 3.96e-06, "loss": 7.4193, "step": 72},
    {"epoch": 0.06, "learning_rate": 4.0200000000000005e-06, "loss": 7.2587, "step": 73},
    {"epoch": 0.06, "learning_rate": 4.080000000000001e-06, "loss": 7.3456, "step": 74},
    {"epoch": 0.06, "learning_rate": 4.14e-06, "loss": 7.1123, "step": 75},
    {"epoch": 0.06, "learning_rate": 4.2000000000000004e-06, "loss": 7.4739, "step": 76},
    {"epoch": 0.06, "learning_rate": 4.26e-06, "loss": 7.224, "step": 77},
    {"epoch": 0.07, "learning_rate": 4.32e-06, "loss": 7.078, "step": 78},
    {"epoch": 0.07, "learning_rate": 4.3799999999999996e-06, "loss": 7.1178, "step": 79},
    {"epoch": 0.07, "learning_rate": 4.44e-06, "loss": 7.1862, "step": 80},
    {"epoch": 0.07, "learning_rate": 4.5e-06, "loss": 7.3286, "step": 81},
    {"epoch": 0.07, "learning_rate": 4.56e-06, "loss": 7.3456, "step": 82},
    {"epoch": 0.07, "learning_rate": 4.62e-06, "loss": 7.1114, "step": 83},
    {"epoch": 0.07, "learning_rate": 4.68e-06, "loss": 7.2386, "step": 84},
    {"epoch": 0.07, "learning_rate": 4.74e-06, "loss": 7.1793, "step": 85},
    {"epoch": 0.07, "learning_rate": 4.800000000000001e-06, "loss": 7.2678, "step": 86},
    {"epoch": 0.07, "learning_rate": 4.86e-06, "loss": 7.1509, "step": 87},
    {"epoch": 0.07, "learning_rate": 4.92e-06, "loss": 7.2565, "step": 88},
    {"epoch": 0.07, "learning_rate": 4.980000000000001e-06, "loss": 7.3592, "step": 89},
    {"epoch": 0.08, "learning_rate": 5.04e-06, "loss": 7.1701, "step": 90},
    {"epoch": 0.08, "learning_rate": 5.1e-06, "loss": 7.4446, "step": 91},
    {"epoch": 0.08, "learning_rate": 5.16e-06, "loss": 7.289, "step": 92},
    {"epoch": 0.08, "learning_rate": 5.22e-06, "loss": 6.9803, "step": 93},
    {"epoch": 0.08, "learning_rate": 5.279999999999999e-06, "loss": 7.2983, "step": 94},
    {"epoch": 0.08, "learning_rate": 5.34e-06, "loss": 7.4766, "step": 95},
    {"epoch": 0.08, "learning_rate": 5.4e-06, "loss": 7.1428, "step": 96},
    {"epoch": 0.08, "learning_rate": 5.46e-06, "loss": 7.3311, "step": 97},
    {"epoch": 0.08, "learning_rate": 5.52e-06, "loss": 7.2306, "step": 98},
    {"epoch": 0.08, "learning_rate": 5.58e-06, "loss": 6.9861, "step": 99},
    {"epoch": 0.08, "learning_rate": 5.64e-06, "loss": 6.9041, "step": 100},
    {"epoch": 0.08, "learning_rate": 5.7000000000000005e-06, "loss": 7.1725, "step": 101},
    {"epoch": 0.09, "learning_rate": 5.76e-06, "loss": 7.0943, "step": 102},
    {"epoch": 0.09, "learning_rate": 5.82e-06, "loss": 7.1549, "step": 103},
    {"epoch": 0.09, "learning_rate": 5.8800000000000005e-06, "loss": 7.1566, "step": 104},
    {"epoch": 0.09, "learning_rate": 5.940000000000001e-06, "loss": 7.0838, "step": 105},
    {"epoch": 0.09, "learning_rate": 6e-06, "loss": 7.1625, "step": 106},
    {"epoch": 0.09, "learning_rate": 6.0600000000000004e-06, "loss": 7.1241, "step": 107},
    {"epoch": 0.09, "learning_rate": 6.12e-06, "loss": 7.1856, "step": 108},
    {"epoch": 0.09, "learning_rate": 6.18e-06, "loss": 7.3023, "step": 109},
    {"epoch": 0.09, "learning_rate": 6.2399999999999995e-06, "loss": 7.1465, "step": 110},
    {"epoch": 0.09, "learning_rate": 6.3e-06, "loss": 7.2196, "step": 111},
    {"epoch": 0.09, "learning_rate": 6.36e-06, "loss": 7.1786, "step": 112},
    {"epoch": 0.09, "learning_rate": 6.42e-06, "loss": 7.1026, "step": 113},
    {"epoch": 0.1, "learning_rate": 6.48e-06, "loss": 7.0959, "step": 114},
    {"epoch": 0.1, "learning_rate": 6.54e-06, "loss": 7.1844, "step": 115},
    {"epoch": 0.1, "learning_rate": 6.6e-06, "loss": 7.3039, "step": 116},
    {"epoch": 0.1, "learning_rate": 6.660000000000001e-06, "loss": 7.113, "step": 117},
    {"epoch": 0.1, "learning_rate": 6.72e-06, "loss": 6.9747, "step": 118},
    {"epoch": 0.1, "learning_rate": 6.78e-06, "loss": 6.9265, "step": 119},
    {"epoch": 0.1, "learning_rate": 6.840000000000001e-06, "loss": 7.1765, "step": 120},
    {"epoch": 0.1, "learning_rate": 6.900000000000001e-06, "loss": 7.2618, "step": 121},
    {"epoch": 0.1, "learning_rate": 6.96e-06, "loss": 7.0998, "step": 122},
    {"epoch": 0.1, "learning_rate": 7.0200000000000006e-06, "loss": 7.1757, "step": 123},
    {"epoch": 0.1, "learning_rate": 7.08e-06, "loss": 7.1495, "step": 124},
    {"epoch": 0.11, "learning_rate": 7.14e-06, "loss": 7.103, "step": 125},
    {"epoch": 0.11, "learning_rate": 7.2e-06, "loss": 6.9668, "step": 126},
    {"epoch": 0.11, "learning_rate": 7.26e-06, "loss": 7.3312, "step": 127},
    {"epoch": 0.11, "learning_rate": 7.32e-06, "loss": 7.0505, "step": 128},
    {"epoch": 0.11, "learning_rate": 7.3800000000000005e-06, "loss": 7.0606, "step": 129},
    {"epoch": 0.11, "learning_rate": 7.44e-06, "loss": 7.0443, "step": 130},
    {"epoch": 0.11, "learning_rate": 7.5e-06, "loss": 6.9168, "step": 131},
    {"epoch": 0.11, "learning_rate": 7.5600000000000005e-06, "loss": 7.0832, "step": 132},
    {"epoch": 0.11, "learning_rate": 7.62e-06, "loss": 7.2462, "step": 133},
    {"epoch": 0.11, "learning_rate": 7.680000000000001e-06, "loss": 7.0327, "step": 134},
    {"epoch": 0.11, "learning_rate": 7.74e-06, "loss": 7.0976, "step": 135},
    {"epoch": 0.11, "learning_rate": 7.8e-06, "loss": 7.0195, "step": 136},
    {"epoch": 0.12, "learning_rate": 7.860000000000001e-06, "loss": 7.0406, "step": 137},
    {"epoch": 0.12, "learning_rate": 7.92e-06, "loss": 7.1136, "step": 138},
    {"epoch": 0.12, "learning_rate": 7.98e-06, "loss": 7.039, "step": 139},
    {"epoch": 0.12, "learning_rate": 8.040000000000001e-06, "loss": 7.1659, "step": 140},
    {"epoch": 0.12, "learning_rate": 8.1e-06, "loss": 7.093, "step": 141},
    {"epoch": 0.12, "learning_rate": 8.160000000000001e-06, "loss": 7.0387, "step": 142},
    {"epoch": 0.12, "learning_rate": 8.220000000000001e-06, "loss": 6.9572, "step": 143},
    {"epoch": 0.12, "learning_rate": 8.28e-06, "loss": 7.0803, "step": 144},
    {"epoch": 0.12, "learning_rate": 8.340000000000001e-06, "loss": 6.9381, "step": 145},
    {"epoch": 0.12, "learning_rate": 8.400000000000001e-06, "loss": 7.1332, "step": 146},
    {"epoch": 0.12, "learning_rate": 8.459999999999999e-06, "loss": 6.8416, "step": 147},
    {"epoch": 0.12, "learning_rate": 8.52e-06, "loss": 6.6269, "step": 148},
    {"epoch": 0.13, "learning_rate": 8.58e-06, "loss": 6.7934, "step": 149},
    {"epoch": 0.13, "learning_rate": 8.64e-06, "loss": 6.66, "step": 150},
    {"epoch": 0.13, "learning_rate": 8.7e-06, "loss": 7.2848, "step": 151},
    {"epoch": 0.13, "learning_rate": 8.759999999999999e-06, "loss": 7.3452, "step": 152},
    {"epoch": 0.13, "learning_rate": 8.82e-06, "loss": 7.1858, "step": 153},
    {"epoch": 0.13, "learning_rate": 8.88e-06, "loss": 7.2224, "step": 154},
    {"epoch": 0.13, "learning_rate": 8.939999999999999e-06, "loss": 7.0784, "step": 155},
    {"epoch": 0.13, "learning_rate": 9e-06, "loss": 7.0554, "step": 156},
    {"epoch": 0.13, "learning_rate": 9.06e-06, "loss": 7.0581, "step": 157},
    {"epoch": 0.13, "learning_rate": 9.12e-06, "loss": 7.2136, "step": 158},
    {"epoch": 0.13, "learning_rate": 9.18e-06, "loss": 6.9891, "step": 159},
    {"epoch": 0.13, "learning_rate": 9.24e-06, "loss": 6.9661, "step": 160},
    {"epoch": 0.14, "learning_rate": 9.3e-06, "loss": 7.0525, "step": 161},
    {"epoch": 0.14, "learning_rate": 9.36e-06, "loss": 7.1545, "step": 162},
    {"epoch": 0.14, "learning_rate": 9.42e-06, "loss": 7.2081, "step": 163},
    {"epoch": 0.14, "learning_rate": 9.48e-06, "loss": 7.0543, "step": 164},
    {"epoch": 0.14, "learning_rate": 9.54e-06, "loss": 7.028, "step": 165},
    {"epoch": 0.14, "learning_rate": 9.600000000000001e-06, "loss": 6.9173, "step": 166},
    {"epoch": 0.14, "learning_rate": 9.66e-06, "loss": 7.0702, "step": 167},
    {"epoch": 0.14, "learning_rate": 9.72e-06, "loss": 7.0819, "step": 168},
    {"epoch": 0.14, "learning_rate": 9.780000000000001e-06, "loss": 7.1053, "step": 169},
    {"epoch": 0.14, "learning_rate": 9.84e-06, "loss": 6.9988, "step": 170},
    {"epoch": 0.14, "learning_rate": 9.9e-06, "loss": 7.0302, "step": 171},
    {"epoch": 0.14, "learning_rate": 9.960000000000001e-06, "loss": 7.0264, "step": 172},
    {"epoch": 0.15, "learning_rate": 1.002e-05, "loss": 6.8528, "step": 173},
    {"epoch": 0.15, "learning_rate": 1.008e-05, "loss": 7.0871, "step": 174},
    {"epoch": 0.15, "learning_rate": 1.0140000000000001e-05, "loss": 7.119, "step": 175},
    {"epoch": 0.15, "learning_rate": 1.02e-05, "loss": 7.2414, "step": 176},
    {"epoch": 0.15, "learning_rate": 1.0260000000000002e-05, "loss": 7.1961, "step": 177},
    {"epoch": 0.15, "learning_rate": 1.032e-05, "loss": 7.1284, "step": 178},
    {"epoch": 0.15, "learning_rate": 1.0379999999999999e-05, "loss": 7.0386, "step": 179},
    {"epoch": 0.15, "learning_rate": 1.044e-05, "loss": 7.1003, "step": 180},
    {"epoch": 0.15, "learning_rate": 1.05e-05, "loss": 7.1393, "step": 181},
    {"epoch": 0.15, "learning_rate": 1.0559999999999999e-05, "loss": 7.2247, "step": 182},
    {"epoch": 0.15, "learning_rate": 1.062e-05, "loss": 7.0304, "step": 183},
    {"epoch": 0.15, "learning_rate": 1.068e-05, "loss": 7.0428, "step": 184},
    {"epoch": 0.16, "learning_rate": 1.074e-05, "loss": 6.9073, "step": 185},
    {"epoch": 0.16, "learning_rate": 1.08e-05, "loss": 6.9143, "step": 186},
    {"epoch": 0.16, "learning_rate": 1.086e-05, "loss": 7.1989, "step": 187},
    {"epoch": 0.16, "learning_rate": 1.092e-05, "loss": 7.0031, "step": 188},
    {"epoch": 0.16, "learning_rate": 1.098e-05, "loss": 7.0327, "step": 189},
    {"epoch": 0.16, "learning_rate": 1.104e-05, "loss": 6.9772, "step": 190},
    {"epoch": 0.16, "learning_rate": 1.11e-05, "loss": 7.2237, "step": 191},
    {"epoch": 0.16, "learning_rate": 1.116e-05, "loss": 7.2198, "step": 192},
    {"epoch": 0.16, "learning_rate": 1.1220000000000001e-05, "loss": 7.0253, "step": 193},
    {"epoch": 0.16, "learning_rate": 1.128e-05, "loss": 7.0092, "step": 194},
    {"epoch": 0.16, "learning_rate": 1.134e-05, "loss": 6.895, "step": 195},
    {"epoch": 0.16, "learning_rate": 1.1400000000000001e-05, "loss": 6.8361, "step": 196},
    {"epoch": 0.17, "learning_rate": 1.146e-05, "loss": 7.1704, "step": 197},
    {"epoch": 0.17, "learning_rate": 1.152e-05, "loss": 6.8643, "step": 198},
    {"epoch": 0.17, "learning_rate": 1.1580000000000001e-05, "loss": 6.7912, "step": 199},
    {"epoch": 0.17, "learning_rate": 1.164e-05, "loss": 6.3712, "step": 200},
    {"epoch": 0.17, "learning_rate": 1.1700000000000001e-05, "loss": 7.115, "step": 201},
    {"epoch": 0.17, "learning_rate": 1.1760000000000001e-05, "loss": 7.0692, "step": 202},
    {"epoch": 0.17, "learning_rate": 1.182e-05, "loss": 7.0783, "step": 203},
    {"epoch": 0.17, "learning_rate": 1.1880000000000001e-05, "loss": 6.9704, "step": 204},
    {"epoch": 0.17, "learning_rate": 1.1940000000000001e-05, "loss": 6.9688, "step": 205},
    {"epoch": 0.17, "learning_rate": 1.2e-05, "loss": 7.0884, "step": 206},
    {"epoch": 0.17, "learning_rate": 1.2060000000000001e-05, "loss": 7.0586, "step": 207},
    {"epoch": 0.17, "learning_rate": 1.2120000000000001e-05, "loss": 7.2809, "step": 208},
    {"epoch": 0.18, "learning_rate": 1.2180000000000002e-05, "loss": 7.1043, "step": 209},
    {"epoch": 0.18, "learning_rate": 1.224e-05, "loss": 7.2292, "step": 210},
    {"epoch": 0.18, "learning_rate": 1.2299999999999999e-05, "loss": 7.0086, "step": 211},
    {"epoch": 0.18, "learning_rate": 1.236e-05, "loss": 6.9504, "step": 212},
    {"epoch": 0.18, "learning_rate": 1.242e-05, "loss": 7.041, "step": 213},
    {"epoch": 0.18, "learning_rate": 1.2479999999999999e-05, "loss": 7.0411, "step": 214},
    {"epoch": 0.18, "learning_rate": 1.254e-05, "loss": 7.0403, "step": 215},
    {"epoch": 0.18, "learning_rate": 1.26e-05, "loss": 7.009, "step": 216},
    {"epoch": 0.18, "learning_rate": 1.2659999999999999e-05, "loss": 7.0629, "step": 217},
    {"epoch": 0.18, "learning_rate": 1.272e-05, "loss": 7.0728, "step": 218},
    {"epoch": 0.18, "learning_rate": 1.278e-05, "loss": 6.9722, "step": 219},
    {"epoch": 0.18, "learning_rate": 1.284e-05, "loss": 7.1592, "step": 220},
    {"epoch": 0.19, "learning_rate": 1.29e-05, "loss": 6.9572, "step": 221},
    {"epoch": 0.19, "learning_rate": 1.296e-05, "loss": 7.2919, "step": 222},
    {"epoch": 0.19, "learning_rate": 1.302e-05, "loss": 7.1749, "step": 223},
    {"epoch": 0.19, "learning_rate": 1.308e-05, "loss": 7.1748, "step": 224},
    {"epoch": 0.19, "learning_rate": 1.314e-05, "loss": 6.9979, "step": 225},
    {"epoch": 0.19, "learning_rate": 1.32e-05, "loss": 6.8968, "step": 226},
    {"epoch": 0.19, "learning_rate": 1.326e-05, "loss": 7.1294, "step": 227},
    {"epoch": 0.19, "learning_rate": 1.3320000000000001e-05, "loss": 7.1157, "step": 228},
    {"epoch": 0.19, "learning_rate": 1.338e-05, "loss": 6.9978, "step": 229},
    {"epoch": 0.19, "learning_rate": 1.344e-05, "loss": 7.0651, "step": 230},
    {"epoch": 0.19, "learning_rate": 1.3500000000000001e-05, "loss": 7.1366, "step": 231},
    {"epoch": 0.2, "learning_rate": 1.356e-05, "loss": 7.0621, "step": 232},
    {"epoch": 0.2, "learning_rate": 1.362e-05, "loss": 7.0283, "step": 233},
    {"epoch": 0.2, "learning_rate": 1.3680000000000001e-05, "loss": 6.9114, "step": 234},
    {"epoch": 0.2, "learning_rate": 1.374e-05, "loss": 7.1182, "step": 235},
    {"epoch": 0.2, "learning_rate": 1.3800000000000002e-05, "loss": 7.1036, "step": 236},
    {"epoch": 0.2, "learning_rate": 1.3860000000000001e-05, "loss": 7.0777, "step": 237},
    {"epoch": 0.2, "learning_rate": 1.392e-05, "loss": 6.9912, "step": 238},
    {"epoch": 0.2, "learning_rate": 1.3980000000000002e-05, "loss": 7.0432, "step": 239},
    {"epoch": 0.2, "learning_rate": 1.4040000000000001e-05, "loss": 6.8894, "step": 240},
    {"epoch": 0.2, "learning_rate": 1.4099999999999999e-05, "loss": 7.0478, "step": 241},
    {"epoch": 0.2, "learning_rate": 1.416e-05, "loss": 7.0257, "step": 242},
    {"epoch": 0.2, "learning_rate": 1.422e-05, "loss": 7.1119, "step": 243},
    {"epoch": 0.21, "learning_rate": 1.428e-05, "loss": 7.1485, "step": 244},
    {"epoch": 0.21, "learning_rate": 1.434e-05, "loss": 6.8617, "step": 245},
    {"epoch": 0.21, "learning_rate": 1.44e-05, "loss": 6.8215, "step": 246},
    {"epoch": 0.21, "learning_rate": 1.446e-05, "loss": 7.3806, "step": 247},
    {"epoch": 0.21, "learning_rate": 1.452e-05, "loss": 6.7644, "step": 248},
    {"epoch": 0.21, "learning_rate": 1.458e-05, "loss": 6.6447, "step": 249},
    {"epoch": 0.21, "learning_rate": 1.464e-05, "loss": 6.4345, "step": 250},
    {"epoch": 0.21, "learning_rate": 1.47e-05, "loss": 7.2032, "step": 251},
    {"epoch": 0.21, "learning_rate": 1.4760000000000001e-05, "loss": 6.9527, "step": 252},
    {"epoch": 0.21, "learning_rate": 1.482e-05, "loss": 7.1685, "step": 253},
    {"epoch": 0.21, "learning_rate": 1.488e-05, "loss": 6.9349, "step": 254},
    {"epoch": 0.21, "learning_rate": 1.4940000000000001e-05, "loss": 6.9983, "step": 255},
    {"epoch": 0.22, "learning_rate": 1.5e-05, "loss": 7.0868, "step": 256},
    {"epoch": 0.22, "learning_rate": 1.506e-05, "loss": 6.9868, "step": 257},
    {"epoch": 0.22, "learning_rate": 1.5120000000000001e-05, "loss": 7.1154, "step": 258},
    {"epoch": 0.22, "learning_rate": 1.518e-05, "loss": 6.9745, "step": 259},
    {"epoch": 0.22, "learning_rate": 1.524e-05, "loss": 6.9459, "step": 260},
    {"epoch": 0.22, "learning_rate": 1.53e-05, "loss": 6.999, "step": 261},
    {"epoch": 0.22, "learning_rate": 1.5360000000000002e-05, "loss": 6.9631, "step": 262},
    {"epoch": 0.22, "learning_rate": 1.542e-05, "loss": 6.9308, "step": 263},
    {"epoch": 0.22, "learning_rate": 1.548e-05, "loss": 7.047, "step": 264},
    {"epoch": 0.22, "learning_rate": 1.554e-05, "loss": 6.9798, "step": 265},
    {"epoch": 0.22, "learning_rate": 1.56e-05, "loss": 7.029, "step": 266},
    {"epoch": 0.22, "learning_rate": 1.5660000000000003e-05, "loss": 6.9405, "step": 267},
    {"epoch": 0.23, "learning_rate": 1.5720000000000002e-05, "loss": 6.9293, "step": 268},
    {"epoch": 0.23, "learning_rate": 1.578e-05, "loss": 7.0008, "step": 269},
    {"epoch": 0.23, "learning_rate": 1.584e-05, "loss": 7.0297, "step": 270},
    {"epoch": 0.23, "learning_rate": 1.59e-05, "loss": 7.0667, "step": 271},
    {"epoch": 0.23, "learning_rate": 1.596e-05, "loss": 6.9658, "step": 272},
    {"epoch": 0.23, "learning_rate": 1.6020000000000002e-05, "loss": 7.0879, "step": 273},
    {"epoch": 0.23, "learning_rate": 1.6080000000000002e-05, "loss": 7.0809, "step": 274},
    {"epoch": 0.23, "learning_rate": 1.614e-05, "loss": 7.089, "step": 275},
    {"epoch": 0.23, "learning_rate": 1.62e-05, "loss": 6.8732, "step": 276},
    {"epoch": 0.23, "learning_rate": 1.626e-05, "loss": 6.9728, "step": 277},
    {"epoch": 0.23, "learning_rate": 1.6320000000000003e-05, "loss": 6.9928, "step": 278},
    {"epoch": 0.23, "learning_rate": 1.6380000000000002e-05, "loss": 7.1316, "step": 279},
    {"epoch": 0.24, "learning_rate": 1.6440000000000002e-05, "loss": 6.8699, "step": 280},
    {"epoch": 0.24, "learning_rate": 1.65e-05, "loss": 6.8749, "step": 281},
    {"epoch": 0.24, "learning_rate": 1.656e-05, "loss": 6.9679, "step": 282},
    {"epoch": 0.24, "learning_rate": 1.6620000000000004e-05, "loss": 7.1804, "step": 283},
    {"epoch": 0.24, "learning_rate": 1.6680000000000003e-05, "loss": 7.1059, "step": 284},
    {"epoch": 0.24, "learning_rate": 1.6740000000000002e-05, "loss": 6.9953, "step": 285},
    {"epoch": 0.24, "learning_rate": 1.6800000000000002e-05, "loss": 7.0, "step": 286},
    {"epoch": 0.24, "learning_rate": 1.686e-05, "loss": 6.9042, "step": 287},
    {"epoch": 0.24, "learning_rate": 1.6919999999999997e-05, "loss": 6.9412, "step": 288},
    {"epoch": 0.24, "learning_rate": 1.698e-05, "loss": 7.0456, "step": 289},
    {"epoch": 0.24, "learning_rate": 1.704e-05, "loss": 6.9086, "step": 290},
    {"epoch": 0.24, "learning_rate": 1.71e-05, "loss": 7.1757, "step": 291},
    {"epoch": 0.25, "learning_rate": 1.716e-05, "loss": 7.0248, "step": 292},
    {"epoch": 0.25, "learning_rate": 1.7219999999999998e-05, "loss": 6.7068, "step": 293},
    {"epoch": 0.25, "learning_rate": 1.728e-05, "loss": 6.8182, "step": 294},
    {"epoch": 0.25, "learning_rate": 1.734e-05, "loss": 6.9292, "step": 295},
    {"epoch": 0.25, "learning_rate": 1.74e-05, "loss": 7.0076, "step": 296},
    {"epoch": 0.25, "learning_rate": 1.746e-05, "loss": 6.7041, "step": 297},
    {"epoch": 0.25, "learning_rate": 1.7519999999999998e-05, "loss": 6.7559, "step": 298},
    {"epoch": 0.25, "learning_rate": 1.758e-05, "loss": 6.8882, "step": 299},
    {"epoch": 0.25, "learning_rate": 1.764e-05, "loss": 6.5027, "step": 300},
    {"epoch": 0.25, "learning_rate": 1.77e-05, "loss": 7.0319, "step": 301},
    {"epoch": 0.25, "learning_rate": 1.776e-05, "loss": 7.1752, "step": 302},
    {"epoch": 0.25, "learning_rate": 1.782e-05, "loss": 7.2225, "step": 303},
    {"epoch": 0.26, "learning_rate": 1.7879999999999998e-05, "loss": 6.9304, "step": 304},
    {"epoch": 0.26, "learning_rate": 1.794e-05, "loss": 6.9973, "step": 305},
    {"epoch": 0.26, "learning_rate": 1.8e-05, "loss": 6.9159, "step": 306},
    {"epoch": 0.26, "learning_rate": 1.806e-05, "loss": 6.9768, "step": 307},
    {"epoch": 0.26, "learning_rate": 1.812e-05, "loss": 6.9426, "step": 308},
    {"epoch": 0.26, "learning_rate": 1.818e-05, "loss": 6.8278, "step": 309},
    {"epoch": 0.26, "learning_rate": 1.824e-05, "loss": 7.0905, "step": 310},
    {"epoch": 0.26, "learning_rate": 1.83e-05, "loss": 6.7491, "step": 311},
    {"epoch": 0.26, "learning_rate": 1.836e-05, "loss": 6.7998, "step": 312},
    {"epoch": 0.26, "learning_rate": 1.842e-05, "loss": 6.8631, "step": 313},
    {"epoch": 0.26, "learning_rate": 1.848e-05, "loss": 6.9686, "step": 314},
    {"epoch": 0.26, "learning_rate": 1.854e-05, "loss": 6.8764, "step": 315},
    {"epoch": 0.27, "learning_rate": 1.86e-05, "loss": 7.1163, "step": 316},
    {"epoch": 0.27, "learning_rate": 1.866e-05, "loss": 6.9662, "step": 317},
    {"epoch": 0.27, "learning_rate": 1.872e-05, "loss": 7.0056, "step": 318},
    {"epoch": 0.27, "learning_rate": 1.878e-05, "loss": 6.8595, "step": 319},
    {"epoch": 0.27, "learning_rate": 1.884e-05, "loss": 6.8479, "step": 320},
    {"epoch": 0.27, "learning_rate": 1.8900000000000002e-05, "loss": 6.8847, "step": 321},
    {"epoch": 0.27, "learning_rate": 1.896e-05, "loss": 6.8581, "step": 322},
    {"epoch": 0.27, "learning_rate": 1.902e-05, "loss": 6.8487, "step": 323},
    {"epoch": 0.27, "learning_rate": 1.908e-05, "loss": 6.9216, "step": 324},
    {"epoch": 0.27, "learning_rate": 1.914e-05, "loss": 6.7967, "step": 325},
    {"epoch": 0.27, "learning_rate": 1.9200000000000003e-05, "loss": 6.9835, "step": 326},
    {"epoch": 0.27, "learning_rate": 1.9260000000000002e-05, "loss": 6.9141, "step": 327},
    {"epoch": 0.28, "learning_rate": 1.932e-05, "loss": 6.9604, "step": 328},
    {"epoch": 0.28, "learning_rate": 1.938e-05, "loss": 6.8948, "step": 329},
    {"epoch": 0.28, "learning_rate": 1.944e-05, "loss": 7.0328, "step": 330},
    {"epoch": 0.28, "learning_rate": 1.95e-05, "loss": 6.9079, "step": 331},
    {"epoch": 0.28, "learning_rate": 1.9560000000000002e-05, "loss": 7.0494, "step": 332},
    {"epoch": 0.28, "learning_rate": 1.9620000000000002e-05, "loss": 7.0634, "step": 333},
    {"epoch": 0.28, "learning_rate": 1.968e-05, "loss": 6.8763, "step": 334},
    {"epoch": 0.28, "learning_rate": 1.974e-05, "loss": 6.9562, "step": 335},
    {"epoch": 0.28, "learning_rate": 1.98e-05, "loss": 7.0171, "step": 336},
    {"epoch": 0.28, "learning_rate": 1.9860000000000003e-05, "loss": 6.8003, "step": 337},
    {"epoch": 0.28, "learning_rate": 1.9920000000000002e-05, "loss": 7.075, "step": 338},
    {"epoch": 0.28, "learning_rate": 1.9980000000000002e-05, "loss": 6.8714, "step": 339},
    {"epoch": 0.29, "learning_rate": 2.004e-05, "loss": 6.9402, "step": 340},
    {"epoch": 0.29, "learning_rate": 2.01e-05, "loss": 6.8398, "step": 341},
    {"epoch": 0.29, "learning_rate": 2.016e-05, "loss": 7.0171, "step": 342},
    {"epoch": 0.29, "learning_rate": 2.0220000000000003e-05, "loss": 6.779, "step": 343},
    {"epoch": 0.29, "learning_rate": 2.0280000000000002e-05, "loss": 6.9852, "step": 344},
    {"epoch": 0.29, "learning_rate": 2.0340000000000002e-05, "loss": 6.7591, "step": 345},
    {"epoch": 0.29, "learning_rate": 2.04e-05, "loss": 6.7046, "step": 346},
    {"epoch": 0.29, "learning_rate": 2.046e-05, "loss": 6.8935, "step": 347},
    {"epoch": 0.29, "learning_rate": 2.0520000000000003e-05, "loss": 6.9409, "step": 348},
    {"epoch": 0.29, "learning_rate": 2.0580000000000003e-05, "loss": 6.5184, "step": 349},
    {"epoch": 0.29, "learning_rate": 2.064e-05, "loss": 6.6459, "step": 350},
    {"epoch": 0.3, "learning_rate": 2.07e-05, "loss": 7.2592, "step": 351},
    {"epoch": 0.3, "learning_rate": 2.0759999999999998e-05, "loss": 7.1277, "step": 352},
    {"epoch": 0.3, "learning_rate": 2.082e-05, "loss": 7.0065, "step": 353},
    {"epoch": 0.3, "learning_rate": 2.088e-05, "loss": 7.0755, "step": 354},
    {"epoch": 0.3, "learning_rate": 2.094e-05, "loss": 6.9331, "step": 355},
    {"epoch": 0.3, "learning_rate": 2.1e-05, "loss": 7.0235, "step": 356},
    {"epoch": 0.3, "learning_rate": 2.1059999999999998e-05, "loss": 7.0566, "step": 357},
    {"epoch": 0.3, "learning_rate": 2.1119999999999998e-05, "loss": 6.8507, "step": 358},
    {"epoch": 0.3, "learning_rate": 2.118e-05, "loss": 6.9441, "step": 359},
    {"epoch": 0.3, "learning_rate": 2.124e-05, "loss": 7.0454, "step": 360},
    {"epoch": 0.3, "learning_rate": 2.13e-05, "loss": 6.8663, "step": 361},
    {"epoch": 0.3, "learning_rate": 2.136e-05, "loss": 6.8307, "step": 362},
    {"epoch": 0.31, "learning_rate": 2.1419999999999998e-05, "loss": 7.0394, "step": 363},
    {"epoch": 0.31, "learning_rate": 2.148e-05, "loss": 6.9532, "step": 364},
    {"epoch": 0.31, "learning_rate": 2.154e-05, "loss": 6.8977, "step": 365},
    {"epoch": 0.31, "learning_rate": 2.16e-05, "loss": 6.995, "step": 366},
    {"epoch": 0.31, "learning_rate": 2.166e-05, "loss": 6.963, "step": 367},
    {"epoch": 0.31, "learning_rate": 2.172e-05, "loss": 7.0942, "step": 368},
    {"epoch": 0.31, "learning_rate": 2.178e-05, "loss": 6.9486, "step": 369},
    {"epoch": 0.31, "learning_rate": 2.184e-05, "loss": 7.0199, "step": 370},
    {"epoch": 0.31, "learning_rate": 2.19e-05, "loss": 6.8288, "step": 371},
    {"epoch": 0.31, "learning_rate": 2.196e-05, "loss": 6.8878, "step": 372},
    {"epoch": 0.31, "learning_rate": 2.202e-05, "loss": 6.7846, "step": 373},
    {"epoch": 0.31, "learning_rate": 2.208e-05, "loss": 6.8995, "step": 374},
    {"epoch": 0.32, "learning_rate": 2.214e-05, "loss": 6.8338, "step": 375},
    {"epoch": 0.32, "learning_rate": 2.22e-05, "loss": 7.0227, "step": 376},
    {"epoch": 0.32, "learning_rate": 2.226e-05, "loss": 6.859, "step": 377},
    {"epoch": 0.32, "learning_rate": 2.232e-05, "loss": 6.9013, "step": 378},
    {"epoch": 0.32, "learning_rate": 2.238e-05, "loss": 6.929, "step": 379},
    {"epoch": 0.32, "learning_rate": 2.2440000000000002e-05, "loss": 6.8989, "step": 380},
    {"epoch": 0.32, "learning_rate": 2.25e-05, "loss": 6.8926, "step": 381},
    {"epoch": 0.32, "learning_rate": 2.256e-05, "loss": 6.8915, "step": 382},
    {"epoch": 0.32, "learning_rate": 2.262e-05, "loss": 6.7224, "step": 383},
    {"epoch": 0.32, "learning_rate": 2.268e-05, "loss": 7.0929, "step": 384},
    {"epoch": 0.32, "learning_rate": 2.274e-05, "loss": 6.8961, "step": 385},
    {"epoch": 0.32, "learning_rate": 2.2800000000000002e-05, "loss": 6.7993, "step": 386},
    {"epoch": 0.33, "learning_rate": 2.286e-05, "loss": 6.8447, "step": 387},
    {"epoch": 0.33, "learning_rate": 2.292e-05, "loss": 6.914, "step": 388},
    {"epoch": 0.33, "learning_rate": 2.298e-05, "loss": 7.0236, "step": 389},
    {"epoch": 0.33, "learning_rate": 2.304e-05, "loss": 7.1832, "step": 390},
    {"epoch": 0.33, "learning_rate": 2.3100000000000002e-05, "loss": 7.0389, "step": 391},
    {"epoch": 0.33, "learning_rate": 2.3160000000000002e-05, "loss": 6.8648, "step": 392},
    {"epoch": 0.33, "learning_rate": 2.322e-05, "loss": 6.7297, "step": 393},
    {"epoch": 0.33, "learning_rate": 2.328e-05, "loss": 6.858, "step": 394},
    {"epoch": 0.33, "learning_rate": 2.334e-05, "loss": 6.677, "step": 395},
    {"epoch": 0.33, "learning_rate": 2.3400000000000003e-05, "loss": 6.7588, "step": 396},
    {"epoch": 0.33, "learning_rate": 2.3460000000000002e-05, "loss": 6.4795, "step": 397},
    {"epoch": 0.33, "learning_rate": 2.3520000000000002e-05, "loss": 6.5628, "step": 398},
    {"epoch": 0.34, "learning_rate": 2.358e-05, "loss": 6.4478, "step": 399},
    {"epoch": 0.34, "learning_rate": 2.364e-05, "loss": 6.2483, "step": 400},
    {"epoch": 0.34, "learning_rate": 2.37e-05, "loss": 7.0719, "step": 401},
    {"epoch": 0.34, "learning_rate": 2.3760000000000003e-05, "loss": 7.0878, "step": 402},
    {"epoch": 0.34, "learning_rate": 2.3820000000000002e-05, "loss": 7.1169, "step": 403},
    {"epoch": 0.34, "learning_rate": 2.3880000000000002e-05, "loss": 7.1507, "step": 404},
    {"epoch": 0.34, "learning_rate": 2.394e-05, "loss": 6.8746, "step": 405},
    {"epoch": 0.34, "learning_rate": 2.4e-05, "loss": 6.9829, "step": 406},
    {"epoch": 0.34, "learning_rate": 2.4060000000000003e-05, "loss": 6.9614, "step": 407},
    {"epoch": 0.34, "learning_rate": 2.4120000000000003e-05, "loss": 6.9648, "step": 408},
    {"epoch": 0.34, "learning_rate": 2.4180000000000002e-05, "loss": 6.8028, "step": 409},
    {"epoch": 0.34, "learning_rate": 2.4240000000000002e-05, "loss": 7.0344, "step": 410},
    {"epoch": 0.35, "learning_rate": 2.43e-05, "loss": 6.9742, "step": 411},
    {"epoch": 0.35, "learning_rate": 2.4360000000000004e-05, "loss": 6.9888, "step": 412},
    {"epoch": 0.35, "learning_rate": 2.442e-05, "loss": 6.8872, "step": 413},
    {"epoch": 0.35, "learning_rate": 2.448e-05, "loss": 6.8158, "step": 414},
    {"epoch": 0.35, "learning_rate": 2.454e-05, "loss": 6.8687, "step": 415},
    {"epoch": 0.35, "learning_rate": 2.4599999999999998e-05, "loss": 7.0971, "step": 416},
    {"epoch": 0.35, "learning_rate": 2.4659999999999998e-05, "loss": 6.8733, "step": 417},
    {"epoch": 0.35, "learning_rate": 2.472e-05, "loss": 6.7277, "step": 418},
    {"epoch": 0.35, "learning_rate": 2.478e-05, "loss": 6.8349, "step": 419},
    {"epoch": 0.35, "learning_rate": 2.484e-05, "loss": 7.0487, "step": 420},
    {"epoch": 0.35, "learning_rate": 2.49e-05, "loss": 6.9402, "step": 421},
    {"epoch": 0.35, "learning_rate": 2.4959999999999998e-05, "loss": 6.8249, "step": 422},
    {"epoch": 0.36, "learning_rate": 2.502e-05, "loss": 6.9416, "step": 423},
    {"epoch": 0.36, "learning_rate": 2.508e-05, "loss": 6.8635, "step": 424},
    {"epoch": 0.36, "learning_rate": 2.514e-05, "loss": 7.0993, "step": 425},
    {"epoch": 0.36, "learning_rate": 2.52e-05, "loss": 6.9045, "step": 426},
    {"epoch": 0.36, "learning_rate": 2.526e-05, "loss": 6.8628, "step": 427},
    {"epoch": 0.36, "learning_rate": 2.5319999999999998e-05, "loss": 6.94, "step": 428},
    {"epoch": 0.36, "learning_rate": 2.538e-05, "loss": 7.0008, "step": 429},
    {"epoch": 0.36, "learning_rate": 2.544e-05, "loss": 6.928, "step": 430},
    {"epoch": 0.36, "learning_rate": 2.55e-05, "loss": 7.0018, "step": 431},
    {"epoch": 0.36, "learning_rate": 2.556e-05, "loss": 6.6915, "step": 432},
    {"epoch": 0.36, "learning_rate": 2.562e-05, "loss": 6.9509, "step": 433},
    {"epoch": 0.36, "learning_rate": 2.568e-05, "loss": 6.894, "step": 434},
    {"epoch": 0.37, "learning_rate": 2.574e-05, "loss": 7.0125, "step": 435},
    {"epoch": 0.37, "learning_rate": 2.58e-05, "loss": 6.743, "step": 436},
    {"epoch": 0.37, "learning_rate": 2.586e-05, "loss": 6.8109, "step": 437},
    {"epoch": 0.37, "learning_rate": 2.592e-05, "loss": 6.7761, "step": 438},
    {"epoch": 0.37, "learning_rate": 2.5980000000000002e-05, "loss": 6.9957, "step": 439},
    {"epoch": 0.37, "learning_rate": 2.604e-05, "loss": 6.8469, "step": 440},
    {"epoch": 0.37, "learning_rate": 2.61e-05, "loss": 6.9463, "step": 441},
    {"epoch": 0.37, "learning_rate": 2.616e-05, "loss": 6.7968, "step": 442},
    {"epoch": 0.37, "learning_rate": 2.622e-05, "loss": 6.8395, "step": 443},
    {"epoch": 0.37, "learning_rate": 2.628e-05, "loss": 6.7478, "step": 444},
    {"epoch": 0.37, "learning_rate": 2.6340000000000002e-05, "loss": 6.5671, "step": 445},
    {"epoch": 0.37, "learning_rate": 2.64e-05, "loss": 6.766, "step": 446},
    {"epoch": 0.38, "learning_rate": 2.646e-05, "loss": 6.6719, "step": 447},
    {"epoch": 0.38, "learning_rate": 2.652e-05, "loss": 6.8373, "step": 448},
    {"epoch": 0.38, "learning_rate": 2.658e-05, "loss": 6.5786, "step": 449},
    {"epoch": 0.38, "learning_rate": 2.6640000000000002e-05, "loss": 6.3753, "step": 450},
    {"epoch": 0.38, "learning_rate": 2.6700000000000002e-05, "loss": 6.9095, "step": 451},
    {"epoch": 0.38, "learning_rate": 2.676e-05, "loss": 6.8914, "step": 452},
    {"epoch": 0.38, "learning_rate": 2.682e-05, "loss": 6.9704, "step": 453},
    {"epoch": 0.38, "learning_rate": 2.688e-05, "loss": 6.9067, "step": 454},
    {"epoch": 0.38, "learning_rate": 2.6940000000000003e-05, "loss": 6.9692, "step": 455},
    {"epoch": 0.38, "learning_rate": 2.7000000000000002e-05, "loss": 6.9132, "step": 456},
    {"epoch": 0.38, "learning_rate": 2.7060000000000002e-05, "loss": 6.945, "step": 457},
    {"epoch": 0.39, "learning_rate": 2.712e-05, "loss": 7.0245, "step": 458},
    {"epoch": 0.39, "learning_rate": 2.718e-05, "loss": 7.0023, "step": 459},
    {"epoch": 0.39, "learning_rate": 2.724e-05, "loss": 6.981, "step": 460},
    {"epoch": 0.39, "learning_rate": 2.7300000000000003e-05, "loss": 6.8999, "step": 461},
    {"epoch": 0.39, "learning_rate": 2.7360000000000002e-05, "loss": 6.8976, "step": 462},
    {"epoch": 0.39, "learning_rate": 2.7420000000000002e-05, "loss": 6.9406, "step": 463},
    {"epoch": 0.39, "learning_rate": 2.748e-05, "loss": 6.9898, "step": 464},
    {"epoch": 0.39, "learning_rate": 2.754e-05, "loss": 6.9237, "step": 465},
    {"epoch": 0.39, "learning_rate": 2.7600000000000003e-05, "loss": 6.9051, "step": 466},
    {"epoch": 0.39, "learning_rate": 2.7660000000000003e-05, "loss": 6.7976, "step": 467},
    {"epoch": 0.39, "learning_rate": 2.7720000000000002e-05, "loss": 6.86, "step": 468},
    {"epoch": 0.39, "learning_rate": 2.778e-05, "loss": 6.6787, "step": 469},
    {"epoch": 0.4, "learning_rate": 2.784e-05, "loss": 7.101, "step": 470},
    {"epoch": 0.4, "learning_rate": 2.79e-05, "loss": 6.8838, "step": 471},
    {"epoch": 0.4, "learning_rate": 2.7960000000000003e-05, "loss": 7.0176, "step": 472},
    {"epoch": 0.4, "learning_rate": 2.8020000000000003e-05, "loss": 6.9806, "step": 473},
    {"epoch": 0.4, "learning_rate": 2.8080000000000002e-05, "loss": 7.0431, "step": 474},
    {"epoch": 0.4, "learning_rate": 2.8139999999999998e-05, "loss": 6.8727, "step": 475},
    {"epoch": 0.4, "learning_rate": 2.8199999999999998e-05, "loss": 6.7498, "step": 476},
    {"epoch": 0.4, "learning_rate": 2.826e-05, "loss": 6.6904, "step": 477},
    {"epoch": 0.4, "learning_rate": 2.832e-05, "loss": 6.8535, "step": 478},
    {"epoch": 0.4, "learning_rate": 2.838e-05, "loss": 6.842, "step": 479},
    {"epoch": 0.4, "learning_rate": 2.844e-05, "loss": 7.0386, "step": 480},
    {"epoch": 0.4, "learning_rate": 2.8499999999999998e-05, "loss": 6.8259, "step": 481},
    {"epoch": 0.41, "learning_rate": 2.856e-05, "loss": 6.8453, "step": 482},
    {"epoch": 0.41, "learning_rate": 2.862e-05, "loss": 6.8384, "step": 483},
    {"epoch": 0.41, "learning_rate": 2.868e-05, "loss": 6.9339, "step": 484},
    {"epoch": 0.41, "learning_rate": 2.874e-05, "loss": 6.9688, "step": 485},
    {"epoch": 0.41, "learning_rate": 2.88e-05, "loss": 6.7787, "step": 486},
    {"epoch": 0.41, "learning_rate": 2.8859999999999998e-05, "loss": 6.8239, "step": 487},
    {"epoch": 0.41, "learning_rate": 2.892e-05, "loss": 6.9513, "step": 488},
    {"epoch": 0.41, "learning_rate": 2.898e-05, "loss": 6.9371, "step": 489},
    {"epoch": 0.41, "learning_rate": 2.904e-05, "loss": 6.8265, "step": 490},
    {"epoch": 0.41, "learning_rate": 2.91e-05, "loss": 6.973, "step": 491},
    {"epoch": 0.41, "learning_rate": 2.916e-05, "loss": 6.9751, "step": 492},
    {"epoch": 0.41, "learning_rate": 2.922e-05, "loss": 7.0204, "step": 493},
    {"epoch": 0.42, "learning_rate": 2.928e-05, "loss": 6.9157, "step": 494},
    {"epoch": 0.42, "learning_rate": 2.934e-05, "loss": 6.8475, "step": 495},
    {"epoch": 0.42, "learning_rate": 2.94e-05, "loss": 6.7319, "step": 496},
    {"epoch": 0.42, "learning_rate": 2.946e-05, "loss": 6.7064, "step": 497},
    {"epoch": 0.42, "learning_rate": 2.9520000000000002e-05, "loss": 6.5884, "step": 498},
    {"epoch": 0.42, "learning_rate": 2.958e-05, "loss": 6.547, "step": 499},
    {"epoch": 0.42, "learning_rate": 2.964e-05, "loss": 6.5033, "step": 500},
    {"epoch": 0.42, "eval_loss": 6.841328144073486, "eval_runtime": 400.1835, "eval_samples_per_second": 6.602, "eval_steps_per_second": 0.552, "eval_wer": 0.9372383276782348, "step": 500},
    {"epoch": 0.42, "learning_rate": 2.97e-05, "loss": 7.0164, "step": 501},
    {"epoch": 0.42, "learning_rate": 2.976e-05, "loss": 7.0229, "step": 502},
    {"epoch": 0.42, "learning_rate": 2.982e-05, "loss": 7.0215, "step": 503},
    {"epoch": 0.42, "learning_rate": 2.9880000000000002e-05, "loss": 6.9711, "step": 504},
    {"epoch": 0.42, "learning_rate": 2.994e-05, "loss": 6.971, "step": 505},
    {"epoch": 0.43, "learning_rate": 3e-05, "loss": 7.077, "step": 506},
    {"epoch": 0.43, "learning_rate": 2.995645863570392e-05, "loss": 6.9226, "step": 507},
    {"epoch": 0.43, "learning_rate": 2.9912917271407838e-05, "loss": 6.9394, "step": 508},
    {"epoch": 0.43, "learning_rate": 2.9869375907111758e-05, "loss": 6.8403, "step": 509},
    {"epoch": 0.43, "learning_rate": 2.9825834542815675e-05, "loss": 6.9181, "step": 510},
    {"epoch": 0.43, "learning_rate": 2.9782293178519592e-05, "loss": 6.7456, "step": 511},
    {"epoch": 0.43, "learning_rate": 2.9738751814223512e-05, "loss": 6.9487, "step": 512},
    {"epoch": 0.43, "learning_rate": 2.9695210449927432e-05, "loss": 6.9014, "step": 513},
    {"epoch": 0.43, "learning_rate": 2.9651669085631353e-05, "loss": 6.8337, "step": 514},
    {"epoch": 0.43, "learning_rate": 2.960812772133527e-05, "loss": 6.9608, "step": 515},
    {"epoch": 0.43, "learning_rate": 2.956458635703919e-05, "loss": 6.7855, "step": 516},
    {"epoch": 0.43, "learning_rate": 2.9521044992743107e-05, "loss": 6.9306, "step": 517},
    {"epoch": 0.44, "learning_rate": 2.9477503628447024e-05, "loss": 6.8124, "step": 518},
    {"epoch": 0.44, "learning_rate": 2.9433962264150944e-05, "loss": 7.0741, "step": 519},
    {"epoch": 0.44, "learning_rate": 2.9390420899854864e-05, "loss": 6.8125, "step": 520},
    {"epoch": 0.44, "learning_rate": 2.934687953555878e-05, "loss": 6.9402, "step": 521},
    {"epoch": 0.44, "learning_rate": 2.93033381712627e-05, "loss": 6.9112, "step": 522},
    {"epoch": 0.44, "learning_rate": 2.925979680696662e-05, "loss": 6.9402, "step": 523},
    {"epoch": 0.44, "learning_rate": 2.9216255442670535e-05, "loss": 6.8678, "step": 524},
    {"epoch": 0.44, "learning_rate": 2.9172714078374455e-05, "loss": 6.8617, "step": 525},
    {"epoch": 0.44, "learning_rate": 2.9129172714078376e-05, "loss": 6.7642, "step": 526},
    {"epoch": 0.44, "learning_rate": 2.9085631349782292e-05, "loss": 6.9013, "step": 527},
    {"epoch": 0.44, "learning_rate": 2.9042089985486213e-05, "loss": 6.9558, "step": 528},
    {"epoch": 0.44, "learning_rate": 2.8998548621190133e-05, "loss": 6.8787, "step": 529},
    {"epoch": 0.45, "learning_rate": 2.8955007256894053e-05, "loss": 6.9184, "step": 530},
    {"epoch": 0.45, "learning_rate": 2.8911465892597967e-05, "loss": 6.8576, "step": 531},
    {"epoch": 0.45, "learning_rate": 2.8867924528301887e-05, "loss": 6.9178, "step": 532},
    {"epoch": 0.45, "learning_rate": 2.8824383164005807e-05, "loss": 6.7771, "step": 533},
    {"epoch": 0.45, "learning_rate": 2.8780841799709724e-05, "loss": 6.813, "step": 534},
    {"epoch": 0.45, "learning_rate": 2.8737300435413644e-05, "loss": 6.9827, "step": 535},
    {"epoch": 0.45, "learning_rate": 2.8693759071117565e-05, "loss": 6.7773, "step": 536},
    {"epoch": 0.45, "learning_rate": 2.865021770682148e-05, "loss": 6.8303, "step": 537},
    {"epoch": 0.45, "learning_rate": 2.86066763425254e-05, "loss": 6.7075, "step": 538},
    {"epoch": 0.45, "learning_rate": 2.856313497822932e-05, "loss": 6.9048, "step": 539},
    {"epoch": 0.45, "learning_rate": 2.8519593613933236e-05, "loss": 6.9648, "step": 540},
    {"epoch": 0.45, "learning_rate": 2.8476052249637156e-05, "loss": 6.9927, "step": 541},
    {"epoch": 0.46, "learning_rate": 2.8432510885341076e-05, "loss": 6.7953, "step": 542},
    {"epoch": 0.46, "learning_rate": 2.8388969521044996e-05, "loss": 6.6269, "step": 543},
    {"epoch": 0.46, "learning_rate": 2.834542815674891e-05, "loss": 6.9967, "step": 544},
    {"epoch": 0.46, "learning_rate": 2.830188679245283e-05, "loss": 6.8509, "step": 545},
    {"epoch": 0.46, "learning_rate": 2.825834542815675e-05, "loss": 6.7416, "step": 546},
    {"epoch": 0.46, "learning_rate": 2.8214804063860667e-05, "loss": 6.4946, "step": 547},
    {"epoch": 0.46, "learning_rate": 2.8171262699564588e-05, "loss": 6.6493, "step": 548},
    {"epoch": 0.46, "learning_rate": 2.8127721335268508e-05, "loss": 6.5604, "step": 549},
    {"epoch": 0.46, "learning_rate": 2.8084179970972425e-05, "loss": 6.447, "step": 550},
    {"epoch": 0.46, "learning_rate": 2.804063860667634e-05, "loss": 7.1064, "step": 551},
    {"epoch": 0.46, "learning_rate": 2.7997097242380262e-05, "loss": 6.9909, "step": 552},
    {"epoch": 0.46, "learning_rate": 2.795355587808418e-05, "loss": 7.0095, "step": 553},
    {"epoch": 0.47, "learning_rate": 2.79100145137881e-05, "loss": 6.9143, "step": 554},
    {"epoch": 0.47, "learning_rate": 2.786647314949202e-05, "loss": 7.0488, "step": 555},
    {"epoch": 0.47, "learning_rate": 2.7822931785195936e-05, "loss": 6.9259, "step": 556},
    {"epoch": 0.47, "learning_rate": 2.7779390420899856e-05, "loss": 6.9317, "step": 557},
    {"epoch": 0.47, "learning_rate": 2.7735849056603773e-05, "loss": 6.8489, "step": 558},
    {"epoch": 0.47, "learning_rate": 2.7692307692307694e-05, "loss": 6.9947, "step": 559},
    {"epoch": 0.47, "learning_rate": 2.764876632801161e-05, "loss": 6.8184, "step": 560},
    {"epoch": 0.47, "learning_rate": 2.760522496371553e-05, "loss": 7.1007, "step": 561},
    {"epoch": 0.47, "learning_rate": 2.756168359941945e-05, "loss": 6.7257, "step": 562},
    {"epoch": 0.47, "learning_rate": 2.7518142235123368e-05, "loss": 6.9874, "step": 563},
    {"epoch": 0.47, "learning_rate": 2.7474600870827288e-05, "loss": 6.6412, "step": 564},
    {"epoch": 0.47, "learning_rate": 2.7431059506531205e-05, "loss": 6.8179, "step": 565},
    {"epoch": 0.48, "learning_rate": 2.7387518142235122e-05, "loss": 6.8657, "step": 566},
    {"epoch": 0.48, "learning_rate": 2.7343976777939042e-05, "loss": 6.7908, "step": 567},
    {"epoch": 0.48, "learning_rate": 2.7300435413642962e-05, "loss": 6.8527, "step": 568},
    {"epoch": 0.48, "learning_rate": 2.725689404934688e-05, "loss": 6.8393, "step": 569},
    {"epoch": 0.48, "learning_rate": 2.72133526850508e-05, "loss": 6.8598, "step": 570},
    {"epoch": 0.48, "learning_rate": 2.716981132075472e-05, "loss": 6.932, "step": 571},
    {"epoch": 0.48, "learning_rate": 2.7126269956458637e-05, "loss": 6.8856, "step": 572},
    {"epoch": 0.48, "learning_rate": 2.7082728592162554e-05, "loss": 6.8599, "step": 573},
    {"epoch": 0.48, "learning_rate": 2.7039187227866474e-05, "loss": 6.9485, "step": 574},
    {"epoch": 0.48, "learning_rate": 2.6995645863570394e-05, "loss": 7.0858, "step": 575},
    {"epoch": 0.48,
|
"learning_rate": 2.695210449927431e-05, |
|
"loss": 6.9847, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.690856313497823e-05, |
|
"loss": 6.9401, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.686502177068215e-05, |
|
"loss": 6.8819, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6821480406386065e-05, |
|
"loss": 6.833, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6777939042089985e-05, |
|
"loss": 7.0195, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6734397677793905e-05, |
|
"loss": 7.0122, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6690856313497822e-05, |
|
"loss": 6.7956, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6647314949201743e-05, |
|
"loss": 6.8513, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6603773584905663e-05, |
|
"loss": 6.7966, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.656023222060958e-05, |
|
"loss": 7.043, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6516690856313497e-05, |
|
"loss": 6.8917, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6473149492017417e-05, |
|
"loss": 6.9744, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6429608127721337e-05, |
|
"loss": 6.8343, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6386066763425254e-05, |
|
"loss": 6.7409, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6342525399129174e-05, |
|
"loss": 6.8391, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6298984034833095e-05, |
|
"loss": 6.9164, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.625544267053701e-05, |
|
"loss": 6.7872, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.621190130624093e-05, |
|
"loss": 6.9429, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.616835994194485e-05, |
|
"loss": 6.7511, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6124818577648765e-05, |
|
"loss": 6.6429, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6081277213352686e-05, |
|
"loss": 6.6894, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.6037735849056606e-05, |
|
"loss": 6.586, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5994194484760523e-05, |
|
"loss": 6.6416, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5950653120464443e-05, |
|
"loss": 6.47, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.590711175616836e-05, |
|
"loss": 6.3261, |
|
"step": 600 |
|
}, |
|
    {
      "epoch": 0.51,
      "learning_rate": 2.5863570391872277e-05,
      "loss": 6.8392,
      "step": 601
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5820029027576197e-05,
      "loss": 7.0316,
      "step": 602
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5776487663280117e-05,
      "loss": 7.05,
      "step": 603
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5732946298984038e-05,
      "loss": 6.9018,
      "step": 604
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5689404934687955e-05,
      "loss": 7.0057,
      "step": 605
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.564586357039187e-05,
      "loss": 7.0843,
      "step": 606
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5602322206095792e-05,
      "loss": 6.7689,
      "step": 607
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.555878084179971e-05,
      "loss": 6.9561,
      "step": 608
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.551523947750363e-05,
      "loss": 6.8926,
      "step": 609
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.547169811320755e-05,
      "loss": 6.932,
      "step": 610
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5428156748911466e-05,
      "loss": 6.9758,
      "step": 611
    },
    {
      "epoch": 0.51,
      "learning_rate": 2.5384615384615386e-05,
      "loss": 6.8192,
      "step": 612
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5341074020319303e-05,
      "loss": 6.8481,
      "step": 613
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.529753265602322e-05,
      "loss": 6.8006,
      "step": 614
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.525399129172714e-05,
      "loss": 6.9059,
      "step": 615
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.521044992743106e-05,
      "loss": 6.7516,
      "step": 616
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.516690856313498e-05,
      "loss": 6.8489,
      "step": 617
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5123367198838898e-05,
      "loss": 6.8529,
      "step": 618
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5079825834542818e-05,
      "loss": 6.902,
      "step": 619
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5036284470246735e-05,
      "loss": 7.0009,
      "step": 620
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4992743105950652e-05,
      "loss": 6.6571,
      "step": 621
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4949201741654572e-05,
      "loss": 6.7878,
      "step": 622
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.4905660377358492e-05,
      "loss": 6.8093,
      "step": 623
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.486211901306241e-05,
      "loss": 6.8768,
      "step": 624
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.481857764876633e-05,
      "loss": 6.9355,
      "step": 625
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.477503628447025e-05,
      "loss": 6.9823,
      "step": 626
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4731494920174163e-05,
      "loss": 6.9445,
      "step": 627
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4687953555878083e-05,
      "loss": 6.6985,
      "step": 628
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4644412191582004e-05,
      "loss": 6.7972,
      "step": 629
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.460087082728592e-05,
      "loss": 6.787,
      "step": 630
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.455732946298984e-05,
      "loss": 6.8726,
      "step": 631
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.451378809869376e-05,
      "loss": 6.9417,
      "step": 632
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.447024673439768e-05,
      "loss": 6.8121,
      "step": 633
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4426705370101595e-05,
      "loss": 6.9578,
      "step": 634
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4383164005805515e-05,
      "loss": 6.8855,
      "step": 635
    },
    {
      "epoch": 0.53,
      "learning_rate": 2.4339622641509435e-05,
      "loss": 6.9087,
      "step": 636
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4296081277213352e-05,
      "loss": 6.7884,
      "step": 637
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4252539912917273e-05,
      "loss": 6.7684,
      "step": 638
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4208998548621193e-05,
      "loss": 6.8416,
      "step": 639
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.416545718432511e-05,
      "loss": 7.0718,
      "step": 640
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4121915820029027e-05,
      "loss": 6.7635,
      "step": 641
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4078374455732947e-05,
      "loss": 6.7437,
      "step": 642
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.4034833091436864e-05,
      "loss": 6.9673,
      "step": 643
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3991291727140784e-05,
      "loss": 6.7992,
      "step": 644
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3947750362844704e-05,
      "loss": 6.7811,
      "step": 645
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3904208998548625e-05,
      "loss": 6.7006,
      "step": 646
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.386066763425254e-05,
      "loss": 6.2627,
      "step": 647
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.3817126269956458e-05,
      "loss": 6.7778,
      "step": 648
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.377358490566038e-05,
      "loss": 6.7382,
      "step": 649
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3730043541364295e-05,
      "loss": 6.2313,
      "step": 650
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3686502177068216e-05,
      "loss": 7.1546,
      "step": 651
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3642960812772136e-05,
      "loss": 6.9485,
      "step": 652
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3599419448476053e-05,
      "loss": 7.0624,
      "step": 653
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3555878084179973e-05,
      "loss": 7.0845,
      "step": 654
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.351233671988389e-05,
      "loss": 7.0455,
      "step": 655
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3468795355587807e-05,
      "loss": 6.9887,
      "step": 656
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3425253991291727e-05,
      "loss": 6.8703,
      "step": 657
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3381712626995647e-05,
      "loss": 6.9818,
      "step": 658
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3338171262699564e-05,
      "loss": 6.9037,
      "step": 659
    },
    {
      "epoch": 0.55,
      "learning_rate": 2.3294629898403485e-05,
      "loss": 6.8921,
      "step": 660
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.3251088534107405e-05,
      "loss": 6.809,
      "step": 661
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.320754716981132e-05,
      "loss": 6.9329,
      "step": 662
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.316400580551524e-05,
      "loss": 6.7156,
      "step": 663
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.312046444121916e-05,
      "loss": 6.9578,
      "step": 664
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.307692307692308e-05,
      "loss": 6.9316,
      "step": 665
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.3033381712626996e-05,
      "loss": 6.7878,
      "step": 666
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2989840348330916e-05,
      "loss": 6.9459,
      "step": 667
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2946298984034833e-05,
      "loss": 6.8437,
      "step": 668
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.290275761973875e-05,
      "loss": 7.1687,
      "step": 669
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.285921625544267e-05,
      "loss": 6.9882,
      "step": 670
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.281567489114659e-05,
      "loss": 6.8498,
      "step": 671
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.2772133526850507e-05,
      "loss": 6.8296,
      "step": 672
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2728592162554428e-05,
      "loss": 6.8387,
      "step": 673
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2685050798258348e-05,
      "loss": 6.8033,
      "step": 674
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2641509433962265e-05,
      "loss": 6.9487,
      "step": 675
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.259796806966618e-05,
      "loss": 6.8176,
      "step": 676
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2554426705370102e-05,
      "loss": 7.0318,
      "step": 677
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2510885341074022e-05,
      "loss": 7.0025,
      "step": 678
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.246734397677794e-05,
      "loss": 6.868,
      "step": 679
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.242380261248186e-05,
      "loss": 7.0214,
      "step": 680
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.238026124818578e-05,
      "loss": 6.6984,
      "step": 681
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2336719883889693e-05,
      "loss": 6.7508,
      "step": 682
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.2293178519593613e-05,
      "loss": 6.8234,
      "step": 683
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.2249637155297534e-05,
      "loss": 6.7897,
      "step": 684
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.220609579100145e-05,
      "loss": 6.8332,
      "step": 685
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.216255442670537e-05,
      "loss": 6.8198,
      "step": 686
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.211901306240929e-05,
      "loss": 6.9087,
      "step": 687
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.2075471698113208e-05,
      "loss": 6.854,
      "step": 688
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.2031930333817125e-05,
      "loss": 7.0296,
      "step": 689
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1988388969521045e-05,
      "loss": 6.8097,
      "step": 690
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1944847605224965e-05,
      "loss": 7.042,
      "step": 691
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1901306240928882e-05,
      "loss": 6.756,
      "step": 692
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1857764876632802e-05,
      "loss": 6.7333,
      "step": 693
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.1814223512336723e-05,
      "loss": 6.8924,
      "step": 694
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.177068214804064e-05,
      "loss": 6.7768,
      "step": 695
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1727140783744556e-05,
      "loss": 6.5173,
      "step": 696
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1683599419448477e-05,
      "loss": 6.868,
      "step": 697
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1640058055152394e-05,
      "loss": 6.8194,
      "step": 698
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1596516690856314e-05,
      "loss": 6.4428,
      "step": 699
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1552975326560234e-05,
      "loss": 6.6834,
      "step": 700
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.150943396226415e-05,
      "loss": 7.0682,
      "step": 701
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.146589259796807e-05,
      "loss": 6.8321,
      "step": 702
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1422351233671988e-05,
      "loss": 7.0101,
      "step": 703
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.137880986937591e-05,
      "loss": 6.8518,
      "step": 704
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1335268505079825e-05,
      "loss": 6.9182,
      "step": 705
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1291727140783746e-05,
      "loss": 6.9742,
      "step": 706
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.1248185776487666e-05,
      "loss": 6.9318,
      "step": 707
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.1204644412191583e-05,
      "loss": 6.8179,
      "step": 708
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.1161103047895503e-05,
      "loss": 6.9117,
      "step": 709
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.111756168359942e-05,
      "loss": 6.978,
      "step": 710
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.1074020319303337e-05,
      "loss": 6.7896,
      "step": 711
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.1030478955007257e-05,
      "loss": 7.0812,
      "step": 712
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0986937590711177e-05,
      "loss": 6.9035,
      "step": 713
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0943396226415094e-05,
      "loss": 6.9047,
      "step": 714
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0899854862119014e-05,
      "loss": 6.8181,
      "step": 715
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0856313497822935e-05,
      "loss": 6.8149,
      "step": 716
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.0812772133526848e-05,
      "loss": 6.8505,
      "step": 717
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.076923076923077e-05,
      "loss": 6.9516,
      "step": 718
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.072568940493469e-05,
      "loss": 6.9214,
      "step": 719
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.068214804063861e-05,
      "loss": 6.8467,
      "step": 720
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0638606676342526e-05,
      "loss": 6.7056,
      "step": 721
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0595065312046446e-05,
      "loss": 6.7862,
      "step": 722
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0551523947750363e-05,
      "loss": 6.7903,
      "step": 723
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.050798258345428e-05,
      "loss": 6.7591,
      "step": 724
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.04644412191582e-05,
      "loss": 6.8493,
      "step": 725
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.042089985486212e-05,
      "loss": 6.8346,
      "step": 726
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0377358490566037e-05,
      "loss": 6.7957,
      "step": 727
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0333817126269958e-05,
      "loss": 6.9421,
      "step": 728
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.0290275761973878e-05,
      "loss": 6.8396,
      "step": 729
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.024673439767779e-05,
      "loss": 6.8857,
      "step": 730
    },
    {
      "epoch": 0.61,
      "learning_rate": 2.020319303338171e-05,
      "loss": 6.8232,
      "step": 731
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.0159651669085632e-05,
      "loss": 6.8886,
      "step": 732
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.011611030478955e-05,
      "loss": 6.9045,
      "step": 733
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.007256894049347e-05,
      "loss": 6.7822,
      "step": 734
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.002902757619739e-05,
      "loss": 6.8733,
      "step": 735
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.998548621190131e-05,
      "loss": 6.8417,
      "step": 736
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9941944847605223e-05,
      "loss": 7.0115,
      "step": 737
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9898403483309143e-05,
      "loss": 6.9775,
      "step": 738
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9854862119013064e-05,
      "loss": 6.9662,
      "step": 739
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.981132075471698e-05,
      "loss": 6.8292,
      "step": 740
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.97677793904209e-05,
      "loss": 6.9662,
      "step": 741
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.972423802612482e-05,
      "loss": 6.9709,
      "step": 742
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.9680696661828738e-05,
      "loss": 6.9464,
      "step": 743
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9637155297532655e-05,
      "loss": 6.7093,
      "step": 744
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9593613933236575e-05,
      "loss": 6.7003,
      "step": 745
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9550072568940492e-05,
      "loss": 6.6034,
      "step": 746
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9506531204644412e-05,
      "loss": 6.7655,
      "step": 747
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9462989840348332e-05,
      "loss": 6.9473,
      "step": 748
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9419448476052253e-05,
      "loss": 6.5144,
      "step": 749
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.937590711175617e-05,
      "loss": 6.1647,
      "step": 750
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9332365747460086e-05,
      "loss": 6.9554,
      "step": 751
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9288824383164007e-05,
      "loss": 7.0884,
      "step": 752
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9245283018867924e-05,
      "loss": 6.9059,
      "step": 753
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9201741654571844e-05,
      "loss": 6.8753,
      "step": 754
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.9158200290275764e-05,
      "loss": 7.0667,
      "step": 755
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.911465892597968e-05,
      "loss": 6.9856,
      "step": 756
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.90711175616836e-05,
      "loss": 6.9196,
      "step": 757
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.9027576197387518e-05,
      "loss": 7.0209,
      "step": 758
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8984034833091435e-05,
      "loss": 6.7748,
      "step": 759
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8940493468795355e-05,
      "loss": 6.9459,
      "step": 760
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8896952104499276e-05,
      "loss": 6.8833,
      "step": 761
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8853410740203192e-05,
      "loss": 6.7457,
      "step": 762
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8809869375907113e-05,
      "loss": 6.8224,
      "step": 763
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8766328011611033e-05,
      "loss": 6.7349,
      "step": 764
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.872278664731495e-05,
      "loss": 7.1265,
      "step": 765
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8679245283018867e-05,
      "loss": 6.8845,
      "step": 766
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.8635703918722787e-05,
      "loss": 6.8941,
      "step": 767
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8592162554426707e-05,
      "loss": 6.7019,
      "step": 768
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8548621190130624e-05,
      "loss": 6.8317,
      "step": 769
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8505079825834544e-05,
      "loss": 6.9009,
      "step": 770
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8461538461538465e-05,
      "loss": 6.8667,
      "step": 771
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8417997097242378e-05,
      "loss": 7.018,
      "step": 772
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.83744557329463e-05,
      "loss": 6.8595,
      "step": 773
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.833091436865022e-05,
      "loss": 6.7181,
      "step": 774
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8287373004354136e-05,
      "loss": 6.9806,
      "step": 775
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8243831640058056e-05,
      "loss": 6.8173,
      "step": 776
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8200290275761976e-05,
      "loss": 6.7969,
      "step": 777
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8156748911465896e-05,
      "loss": 6.8389,
      "step": 778
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.811320754716981e-05,
      "loss": 6.8899,
      "step": 779
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.806966618287373e-05,
      "loss": 7.0669,
      "step": 780
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.802612481857765e-05,
      "loss": 6.9312,
      "step": 781
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7982583454281567e-05,
      "loss": 6.784,
      "step": 782
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7939042089985487e-05,
      "loss": 6.6587,
      "step": 783
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7895500725689408e-05,
      "loss": 6.9062,
      "step": 784
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7851959361393325e-05,
      "loss": 6.9787,
      "step": 785
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.780841799709724e-05,
      "loss": 6.6987,
      "step": 786
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7764876632801162e-05,
      "loss": 6.9932,
      "step": 787
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.772133526850508e-05,
      "loss": 6.7969,
      "step": 788
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7677793904209e-05,
      "loss": 6.8405,
      "step": 789
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.763425253991292e-05,
      "loss": 6.7624,
      "step": 790
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.7590711175616836e-05,
      "loss": 6.941,
      "step": 791
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7547169811320753e-05,
      "loss": 6.7807,
      "step": 792
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7503628447024673e-05,
      "loss": 6.7793,
      "step": 793
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7460087082728593e-05,
      "loss": 6.6572,
      "step": 794
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.741654571843251e-05,
      "loss": 6.6306,
      "step": 795
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.737300435413643e-05,
      "loss": 6.5386,
      "step": 796
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.732946298984035e-05,
      "loss": 6.5509,
      "step": 797
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7285921625544268e-05,
      "loss": 6.5294,
      "step": 798
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7242380261248185e-05,
      "loss": 6.4724,
      "step": 799
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7198838896952105e-05,
      "loss": 6.2469,
      "step": 800
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7155297532656022e-05,
      "loss": 6.902,
      "step": 801
    },
    {
      "epoch": 0.67,
      "learning_rate": 1.7111756168359942e-05,
      "loss": 6.8033,
      "step": 802
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.7068214804063862e-05,
      "loss": 6.8523,
      "step": 803
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.702467343976778e-05,
      "loss": 6.8993,
      "step": 804
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.69811320754717e-05,
      "loss": 6.9154,
      "step": 805
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6937590711175616e-05,
      "loss": 6.8057,
      "step": 806
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6894049346879537e-05,
      "loss": 6.8261,
      "step": 807
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6850507982583453e-05,
      "loss": 6.8919,
      "step": 808
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6806966618287374e-05,
      "loss": 6.8706,
      "step": 809
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6763425253991294e-05,
      "loss": 6.8791,
      "step": 810
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.671988388969521e-05,
      "loss": 6.7978,
      "step": 811
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.667634252539913e-05,
      "loss": 6.8467,
      "step": 812
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6632801161103048e-05,
      "loss": 6.7455,
      "step": 813
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.6589259796806965e-05,
      "loss": 7.0637,
      "step": 814
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6545718432510885e-05,
      "loss": 7.0001,
      "step": 815
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6502177068214805e-05,
      "loss": 6.7307,
      "step": 816
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6458635703918722e-05,
      "loss": 6.8449,
      "step": 817
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6415094339622643e-05,
      "loss": 6.6097,
      "step": 818
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6371552975326563e-05,
      "loss": 6.8532,
      "step": 819
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6328011611030476e-05,
      "loss": 6.8925,
      "step": 820
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6284470246734397e-05,
      "loss": 6.8499,
      "step": 821
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6240928882438317e-05,
      "loss": 6.7906,
      "step": 822
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6197387518142237e-05,
      "loss": 6.8578,
      "step": 823
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6153846153846154e-05,
      "loss": 6.9541,
      "step": 824
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6110304789550074e-05,
      "loss": 6.8053,
      "step": 825
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.6066763425253995e-05,
      "loss": 6.8078,
      "step": 826
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.6023222060957908e-05,
      "loss": 6.9305,
      "step": 827
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5979680696661828e-05,
      "loss": 6.9147,
      "step": 828
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.593613933236575e-05,
      "loss": 6.8646,
      "step": 829
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5892597968069665e-05,
      "loss": 7.0318,
      "step": 830
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5849056603773586e-05,
      "loss": 6.8768,
      "step": 831
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5805515239477506e-05,
      "loss": 6.6636,
      "step": 832
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5761973875181423e-05,
      "loss": 6.7513,
      "step": 833
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.571843251088534e-05,
      "loss": 6.9217,
      "step": 834
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.567489114658926e-05,
      "loss": 6.8903,
      "step": 835
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.563134978229318e-05,
      "loss": 7.0025,
      "step": 836
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5587808417997097e-05,
      "loss": 6.926,
      "step": 837
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.5544267053701017e-05,
      "loss": 6.5696,
      "step": 838
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5500725689404938e-05,
      "loss": 7.0113,
      "step": 839
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5457184325108855e-05,
      "loss": 6.793,
      "step": 840
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.541364296081277e-05,
      "loss": 6.9198,
      "step": 841
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.537010159651669e-05,
      "loss": 6.761,
      "step": 842
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.532656023222061e-05,
      "loss": 7.0276,
      "step": 843
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.528301886792453e-05,
      "loss": 6.6037,
      "step": 844
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5239477503628449e-05,
      "loss": 6.7909,
      "step": 845
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5195936139332364e-05,
      "loss": 6.7064,
      "step": 846
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5152394775036285e-05,
      "loss": 6.6922,
      "step": 847
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.5108853410740205e-05,
      "loss": 6.4188,
      "step": 848
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.506531204644412e-05,
      "loss": 6.769,
      "step": 849
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.502177068214804e-05,
      "loss": 6.6032,
      "step": 850
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.497822931785196e-05,
      "loss": 7.065,
      "step": 851
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4934687953555879e-05,
      "loss": 6.877,
      "step": 852
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4891146589259796e-05,
      "loss": 6.9379,
      "step": 853
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4847605224963716e-05,
      "loss": 6.9983,
      "step": 854
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4804063860667635e-05,
      "loss": 6.8895,
      "step": 855
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4760522496371553e-05,
      "loss": 6.9192,
      "step": 856
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4716981132075472e-05,
      "loss": 6.7546,
      "step": 857
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.467343976777939e-05,
      "loss": 6.9329,
      "step": 858
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.462989840348331e-05,
      "loss": 6.7429,
      "step": 859
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4586357039187228e-05,
      "loss": 6.8161,
      "step": 860
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4542815674891146e-05,
      "loss": 6.8705,
      "step": 861
    },
    {
      "epoch": 0.72,
      "learning_rate": 1.4499274310595066e-05,
      "loss": 6.896,
      "step": 862
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4455732946298983e-05,
      "loss": 6.6946,
      "step": 863
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4412191582002904e-05,
      "loss": 6.8785,
      "step": 864
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4368650217706822e-05,
      "loss": 6.91,
      "step": 865
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.432510885341074e-05,
      "loss": 6.7578,
      "step": 866
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.428156748911466e-05,
      "loss": 6.7131,
      "step": 867
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4238026124818578e-05,
      "loss": 6.7858,
      "step": 868
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4194484760522498e-05,
      "loss": 6.7311,
      "step": 869
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4150943396226415e-05,
      "loss": 6.6836,
      "step": 870
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4107402031930334e-05,
      "loss": 6.7571,
      "step": 871
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.4063860667634254e-05,
      "loss": 6.6392,
      "step": 872
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.402031930333817e-05,
      "loss": 6.9785,
      "step": 873
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.397677793904209e-05,
      "loss": 6.9024,
      "step": 874
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.393323657474601e-05,
      "loss": 6.9245,
      "step": 875
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3889695210449928e-05,
      "loss": 6.7119,
      "step": 876
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3846153846153847e-05,
      "loss": 6.7674,
      "step": 877
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3802612481857765e-05,
      "loss": 6.9421,
      "step": 878
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3759071117561684e-05,
      "loss": 6.7645,
      "step": 879
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3715529753265602e-05,
      "loss": 6.926,
      "step": 880
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3671988388969521e-05,
      "loss": 6.7802,
      "step": 881
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.362844702467344e-05,
      "loss": 6.8721,
      "step": 882
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.358490566037736e-05,
      "loss": 6.9203,
      "step": 883
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3541364296081277e-05,
      "loss": 6.9149,
      "step": 884
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3497822931785197e-05,
      "loss": 6.7289,
      "step": 885
    },
    {
      "epoch": 0.74,
      "learning_rate": 1.3454281567489116e-05,
      "loss": 6.853,
      "step": 886
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3410740203193032e-05,
      "loss": 6.7934,
      "step": 887
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3367198838896953e-05,
      "loss": 6.8075,
      "step": 888
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3323657474600871e-05,
      "loss": 6.8848,
      "step": 889
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.328011611030479e-05,
      "loss": 6.671,
      "step": 890
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3236574746008708e-05,
      "loss": 6.9252,
      "step": 891
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3193033381712627e-05,
      "loss": 6.8196,
      "step": 892
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3149492017416547e-05,
      "loss": 6.6605,
      "step": 893
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3105950653120464e-05,
      "loss": 6.6684,
      "step": 894
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3062409288824383e-05,
      "loss": 6.7128,
      "step": 895
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.3018867924528303e-05,
      "loss": 6.705,
      "step": 896
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2975326560232222e-05,
      "loss": 6.5003,
      "step": 897
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.2931785195936138e-05,
      "loss": 6.28,
      "step": 898
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2888243831640059e-05,
      "loss": 6.4944,
      "step": 899
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2844702467343977e-05,
      "loss": 6.0975,
      "step": 900
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2801161103047896e-05,
      "loss": 6.9777,
      "step": 901
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2757619738751814e-05,
      "loss": 6.9377,
      "step": 902
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2714078374455733e-05,
      "loss": 6.9563,
      "step": 903
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2670537010159652e-05,
      "loss": 6.7573,
      "step": 904
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.262699564586357e-05,
      "loss": 6.9223,
      "step": 905
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.258345428156749e-05,
      "loss": 6.9422,
      "step": 906
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2539912917271409e-05,
      "loss": 6.806,
      "step": 907
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2496371552975326e-05,
      "loss": 7.1837,
      "step": 908
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.2452830188679246e-05,
      "loss": 6.8731,
      "step": 909
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2409288824383165e-05,
      "loss": 6.8866,
      "step": 910
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2365747460087082e-05,
      "loss": 6.8061,
      "step": 911
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2322206095791002e-05,
      "loss": 6.8514,
      "step": 912
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.227866473149492e-05,
      "loss": 6.8401,
      "step": 913
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.223512336719884e-05,
      "loss": 6.7048,
      "step": 914
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2191582002902758e-05,
      "loss": 6.826,
      "step": 915
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2148040638606676e-05,
      "loss": 6.8965,
      "step": 916
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2104499274310596e-05,
      "loss": 6.8272,
      "step": 917
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2060957910014513e-05,
      "loss": 6.8323,
      "step": 918
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.2017416545718432e-05,
      "loss": 6.9279,
      "step": 919
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.1973875181422352e-05,
      "loss": 6.8667,
      "step": 920
    },
    {
      "epoch": 0.77,
      "learning_rate": 1.193033381712627e-05,
      "loss": 6.773,
      "step": 921
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.188679245283019e-05,
      "loss": 6.9603,
      "step": 922
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1843251088534108e-05,
      "loss": 6.8699,
      "step": 923
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1799709724238026e-05,
      "loss": 6.8946,
      "step": 924
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1756168359941945e-05,
      "loss": 6.7076,
      "step": 925
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1712626995645864e-05,
      "loss": 6.8156,
      "step": 926
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1669085631349782e-05,
      "loss": 6.9208,
      "step": 927
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1625544267053702e-05,
      "loss": 6.8581,
      "step": 928
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.158200290275762e-05,
      "loss": 6.7359,
      "step": 929
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.153846153846154e-05,
      "loss": 6.8063,
      "step": 930
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1494920174165458e-05,
      "loss": 6.8592,
      "step": 931
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1451378809869375e-05,
      "loss": 6.8561,
      "step": 932
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.1407837445573295e-05,
      "loss": 6.9459,
      "step": 933
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1364296081277214e-05,
      "loss": 6.8407,
      "step": 934
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1320754716981132e-05,
      "loss": 6.7902,
      "step": 935
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1277213352685051e-05,
      "loss": 6.8517,
      "step": 936
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.123367198838897e-05,
      "loss": 6.7892,
      "step": 937
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.119013062409289e-05,
      "loss": 6.9927,
      "step": 938
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1146589259796807e-05,
      "loss": 6.8108,
      "step": 939
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1103047895500725e-05,
      "loss": 6.737,
      "step": 940
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1059506531204646e-05,
      "loss": 6.7429,
      "step": 941
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.1015965166908562e-05,
      "loss": 6.8897,
      "step": 942
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0972423802612483e-05,
      "loss": 6.5433,
      "step": 943
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.0928882438316401e-05,
      "loss": 6.5135,
      "step": 944
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.088534107402032e-05,
      "loss": 6.8652,
      "step": 945
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0841799709724238e-05,
      "loss": 6.6219,
      "step": 946
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0798258345428157e-05,
      "loss": 6.4837,
      "step": 947
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0754716981132076e-05,
      "loss": 6.3776,
      "step": 948
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0711175616835994e-05,
      "loss": 6.4048,
      "step": 949
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0667634252539913e-05,
      "loss": 6.2509,
      "step": 950
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0624092888243833e-05,
      "loss": 6.8918,
      "step": 951
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0580551523947752e-05,
      "loss": 6.9234,
      "step": 952
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0537010159651668e-05,
      "loss": 6.8293,
      "step": 953
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0493468795355589e-05,
      "loss": 6.669,
      "step": 954
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0449927431059507e-05,
      "loss": 6.8053,
      "step": 955
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0406386066763424e-05,
      "loss": 6.9721,
      "step": 956
    },
    {
      "epoch": 0.8,
      "learning_rate": 1.0362844702467344e-05,
      "loss": 6.8532,
      "step": 957
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0319303338171263e-05,
      "loss": 6.9768,
      "step": 958
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0275761973875182e-05,
      "loss": 6.8408,
      "step": 959
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.02322206095791e-05,
      "loss": 6.7216,
      "step": 960
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0188679245283019e-05,
      "loss": 6.7617,
      "step": 961
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0145137880986939e-05,
      "loss": 6.765,
      "step": 962
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0101596516690856e-05,
      "loss": 6.7081,
      "step": 963
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0058055152394774e-05,
      "loss": 6.8027,
      "step": 964
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.0014513788098695e-05,
      "loss": 6.8416,
      "step": 965
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.970972423802612e-06,
      "loss": 6.8549,
      "step": 966
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.927431059506532e-06,
      "loss": 7.026,
      "step": 967
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.88388969521045e-06,
      "loss": 6.863,
      "step": 968
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.840348330914369e-06,
      "loss": 6.5742,
      "step": 969
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.796806966618287e-06,
      "loss": 6.8582,
      "step": 970
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.753265602322206e-06,
      "loss": 6.7078,
      "step": 971
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.709724238026126e-06,
      "loss": 6.6839,
      "step": 972
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.666182873730043e-06,
      "loss": 6.7402,
      "step": 973
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.622641509433962e-06,
      "loss": 6.8191,
      "step": 974
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.579100145137882e-06,
      "loss": 6.8586,
      "step": 975
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.5355587808418e-06,
      "loss": 6.8754,
      "step": 976
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.492017416545717e-06,
      "loss": 6.7543,
      "step": 977
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.448476052249638e-06,
      "loss": 6.8026,
      "step": 978
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.404934687953556e-06,
      "loss": 6.7462,
      "step": 979
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.361393323657475e-06,
      "loss": 6.8254,
      "step": 980
    },
    {
      "epoch": 0.82,
      "learning_rate": 9.317851959361393e-06,
      "loss": 7.0929,
      "step": 981
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.274310595065312e-06,
      "loss": 6.871,
      "step": 982
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.230769230769232e-06,
      "loss": 6.8445,
      "step": 983
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.18722786647315e-06,
      "loss": 7.0254,
      "step": 984
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.143686502177068e-06,
      "loss": 6.7171,
      "step": 985
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.100145137880988e-06,
      "loss": 6.7767,
      "step": 986
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.056603773584905e-06,
      "loss": 6.8884,
      "step": 987
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.013062409288825e-06,
      "loss": 6.8049,
      "step": 988
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.969521044992744e-06,
      "loss": 6.7851,
      "step": 989
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.925979680696662e-06,
      "loss": 6.7375,
      "step": 990
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.882438316400581e-06,
      "loss": 6.7886,
      "step": 991
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.8388969521045e-06,
      "loss": 6.8043,
      "step": 992
    },
    {
      "epoch": 0.83,
      "learning_rate": 8.795355587808418e-06,
      "loss": 6.6738,
      "step": 993
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.751814223512337e-06,
      "loss": 6.9183,
      "step": 994
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.708272859216255e-06,
      "loss": 6.7654,
      "step": 995
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.664731494920175e-06,
      "loss": 7.083,
      "step": 996
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.621190130624092e-06,
      "loss": 6.7406,
      "step": 997
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.577648766328011e-06,
      "loss": 6.54,
      "step": 998
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.534107402031931e-06,
      "loss": 6.5534,
      "step": 999
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.49056603773585e-06,
      "loss": 6.378,
      "step": 1000
    },
    {
      "epoch": 0.84,
      "eval_loss": 6.714595317840576,
      "eval_runtime": 685.6597,
      "eval_samples_per_second": 3.853,
      "eval_steps_per_second": 0.322,
      "eval_wer": 1.2430700240093657,
      "step": 1000
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.447024673439768e-06,
      "loss": 6.824,
      "step": 1001
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.403483309143687e-06,
      "loss": 6.8868,
      "step": 1002
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.359941944847605e-06,
      "loss": 6.8406,
      "step": 1003
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.316400580551524e-06,
      "loss": 6.8759,
      "step": 1004
    },
    {
      "epoch": 0.84,
      "learning_rate": 8.272859216255443e-06,
      "loss": 6.8051,
      "step": 1005
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.229317851959361e-06,
      "loss": 6.8795,
      "step": 1006
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.185776487663281e-06,
      "loss": 6.7839,
      "step": 1007
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.142235123367198e-06,
      "loss": 6.6886,
      "step": 1008
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.098693759071119e-06,
      "loss": 6.935,
      "step": 1009
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.055152394775037e-06,
      "loss": 6.6507,
      "step": 1010
    },
    {
      "epoch": 0.85,
      "learning_rate": 8.011611030478954e-06,
      "loss": 6.7456,
      "step": 1011
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.968069666182874e-06,
      "loss": 6.8132,
      "step": 1012
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.924528301886793e-06,
      "loss": 6.6683,
      "step": 1013
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.880986937590711e-06,
      "loss": 6.7256,
      "step": 1014
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.83744557329463e-06,
      "loss": 6.689,
      "step": 1015
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.793904208998549e-06,
      "loss": 6.8579,
      "step": 1016
    },
    {
      "epoch": 0.85,
      "learning_rate": 7.750362844702469e-06,
      "loss": 6.6898,
      "step": 1017
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.706821480406386e-06,
      "loss": 6.7691,
      "step": 1018
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.663280116110304e-06,
      "loss": 6.9135,
      "step": 1019
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.6197387518142245e-06,
      "loss": 6.6878,
      "step": 1020
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.576197387518142e-06,
      "loss": 6.8522,
      "step": 1021
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.53265602322206e-06,
      "loss": 6.9307,
      "step": 1022
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.48911465892598e-06,
      "loss": 6.9396,
      "step": 1023
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.445573294629898e-06,
      "loss": 6.7439,
      "step": 1024
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.402031930333817e-06,
      "loss": 6.7135,
      "step": 1025
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.358490566037736e-06,
      "loss": 6.7239,
      "step": 1026
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.314949201741655e-06,
      "loss": 6.6989,
      "step": 1027
    },
    {
      "epoch": 0.86,
      "learning_rate": 7.271407837445573e-06,
      "loss": 6.7525,
      "step": 1028
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.227866473149492e-06,
      "loss": 6.8011,
      "step": 1029
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.184325108853411e-06,
      "loss": 6.7347,
      "step": 1030
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.14078374455733e-06,
      "loss": 6.8327,
      "step": 1031
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.097242380261249e-06,
      "loss": 6.6466,
      "step": 1032
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.053701015965167e-06,
      "loss": 6.8424,
      "step": 1033
    },
    {
      "epoch": 0.87,
      "learning_rate": 7.010159651669085e-06,
      "loss": 6.7346,
      "step": 1034
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.966618287373005e-06,
      "loss": 6.7685,
      "step": 1035
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.923076923076923e-06,
      "loss": 6.8679,
      "step": 1036
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.879535558780842e-06,
      "loss": 6.6327,
      "step": 1037
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.8359941944847605e-06,
      "loss": 6.895,
      "step": 1038
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.79245283018868e-06,
      "loss": 6.7754,
      "step": 1039
    },
    {
      "epoch": 0.87,
      "learning_rate": 6.7489114658925985e-06,
      "loss": 6.6316,
      "step": 1040
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.705370101596516e-06,
      "loss": 6.6465,
      "step": 1041
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.661828737300436e-06,
      "loss": 6.7032,
      "step": 1042
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.618287373004354e-06,
      "loss": 6.6369,
      "step": 1043
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.574746008708274e-06,
      "loss": 6.7194,
      "step": 1044
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.531204644412191e-06,
      "loss": 6.7083,
      "step": 1045
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.487663280116111e-06,
      "loss": 6.6924,
      "step": 1046
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.444121915820029e-06,
      "loss": 6.4722,
      "step": 1047
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.400580551523948e-06,
      "loss": 6.6804,
      "step": 1048
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.3570391872278665e-06,
      "loss": 6.786,
      "step": 1049
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.313497822931785e-06,
      "loss": 6.3429,
      "step": 1050
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.2699564586357045e-06,
      "loss": 6.9516,
      "step": 1051
    },
    {
      "epoch": 0.88,
      "learning_rate": 6.226415094339623e-06,
      "loss": 6.7765,
      "step": 1052
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.182873730043541e-06,
      "loss": 6.7808,
      "step": 1053
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.13933236574746e-06,
      "loss": 6.8928,
      "step": 1054
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.095791001451379e-06,
      "loss": 6.8988,
      "step": 1055
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.052249637155298e-06,
      "loss": 6.8776,
      "step": 1056
    },
    {
      "epoch": 0.89,
      "learning_rate": 6.008708272859216e-06,
      "loss": 6.8533,
      "step": 1057
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.965166908563135e-06,
      "loss": 6.7207,
      "step": 1058
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.921625544267054e-06,
      "loss": 6.876,
      "step": 1059
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.8780841799709725e-06,
      "loss": 6.8643,
      "step": 1060
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.834542815674891e-06,
      "loss": 6.9022,
      "step": 1061
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.79100145137881e-06,
      "loss": 6.7957,
      "step": 1062
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.747460087082729e-06,
      "loss": 6.8756,
      "step": 1063
    },
    {
      "epoch": 0.89,
      "learning_rate": 5.703918722786648e-06,
      "loss": 6.7472,
      "step": 1064
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.660377358490566e-06,
      "loss": 6.7388,
      "step": 1065
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.616835994194485e-06,
      "loss": 6.7771,
      "step": 1066
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.573294629898403e-06,
      "loss": 6.7268,
      "step": 1067
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.529753265602323e-06,
      "loss": 6.8189,
      "step": 1068
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.486211901306241e-06,
      "loss": 6.9078,
      "step": 1069
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.44267053701016e-06,
      "loss": 6.8513,
      "step": 1070
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.3991291727140785e-06,
      "loss": 6.6771,
      "step": 1071
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.355587808417997e-06,
      "loss": 6.821,
      "step": 1072
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.3120464441219165e-06,
      "loss": 6.6158,
      "step": 1073
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.268505079825834e-06,
      "loss": 6.7374,
      "step": 1074
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.224963715529754e-06,
      "loss": 6.4849,
      "step": 1075
    },
    {
      "epoch": 0.9,
      "learning_rate": 5.181422351233672e-06,
      "loss": 6.8353,
      "step": 1076
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.137880986937591e-06,
      "loss": 7.0018,
      "step": 1077
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.094339622641509e-06,
      "loss": 6.8767,
      "step": 1078
    },
    {
      "epoch": 0.91,
      "learning_rate": 5.050798258345428e-06,
      "loss": 6.668,
      "step": 1079
    },
    {
      "epoch": 0.91,
|
"learning_rate": 5.007256894049347e-06, |
|
"loss": 6.7554, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.963715529753266e-06, |
|
"loss": 6.7884, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.9201741654571845e-06, |
|
"loss": 7.0688, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.876632801161103e-06, |
|
"loss": 6.598, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.833091436865022e-06, |
|
"loss": 6.7801, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.789550072568941e-06, |
|
"loss": 6.7619, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.746008708272859e-06, |
|
"loss": 6.8128, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.702467343976778e-06, |
|
"loss": 6.756, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.658925979680697e-06, |
|
"loss": 6.7914, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.615384615384616e-06, |
|
"loss": 6.7859, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.571843251088534e-06, |
|
"loss": 6.6515, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.5283018867924524e-06, |
|
"loss": 6.7337, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.484760522496372e-06, |
|
"loss": 6.6213, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.4412191582002904e-06, |
|
"loss": 6.619, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.397677793904209e-06, |
|
"loss": 6.8495, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.354136429608128e-06, |
|
"loss": 6.5749, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.310595065312046e-06, |
|
"loss": 6.7525, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.2670537010159656e-06, |
|
"loss": 6.5626, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.223512336719884e-06, |
|
"loss": 6.7664, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.179970972423803e-06, |
|
"loss": 6.8359, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.136429608127721e-06, |
|
"loss": 5.9896, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.092888243831641e-06, |
|
"loss": 6.8314, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.049346879535559e-06, |
|
"loss": 6.8609, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 4.005805515239477e-06, |
|
"loss": 6.8985, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.962264150943396e-06, |
|
"loss": 6.7312, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.918722786647315e-06, |
|
"loss": 6.7393, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.875181422351234e-06, |
|
"loss": 6.6446, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.831640058055152e-06, |
|
"loss": 7.0349, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.788098693759071e-06, |
|
"loss": 6.9525, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.74455732946299e-06, |
|
"loss": 6.8194, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.7010159651669087e-06, |
|
"loss": 6.722, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6574746008708277e-06, |
|
"loss": 6.7443, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.613933236574746e-06, |
|
"loss": 6.8892, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.570391872278665e-06, |
|
"loss": 6.6213, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.5268505079825834e-06, |
|
"loss": 6.6964, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.4833091436865024e-06, |
|
"loss": 6.9253, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.439767779390421e-06, |
|
"loss": 6.8583, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.39622641509434e-06, |
|
"loss": 6.8969, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.352685050798258e-06, |
|
"loss": 6.8028, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.309143686502177e-06, |
|
"loss": 6.6186, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.2656023222060957e-06, |
|
"loss": 6.9412, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.2220609579100147e-06, |
|
"loss": 6.7968, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.1785195936139333e-06, |
|
"loss": 7.016, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.1349782293178522e-06, |
|
"loss": 6.7541, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.0914368650217704e-06, |
|
"loss": 6.9549, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.0478955007256894e-06, |
|
"loss": 6.9793, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.004354136429608e-06, |
|
"loss": 6.7476, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.960812772133527e-06, |
|
"loss": 6.818, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.9172714078374455e-06, |
|
"loss": 6.7245, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.8737300435413645e-06, |
|
"loss": 6.7706, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.830188679245283e-06, |
|
"loss": 6.7443, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7866473149492017e-06, |
|
"loss": 6.6402, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7431059506531207e-06, |
|
"loss": 6.6075, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.6995645863570392e-06, |
|
"loss": 6.9054, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.6560232220609582e-06, |
|
"loss": 6.7971, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.612481857764877e-06, |
|
"loss": 6.9146, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.5689404934687954e-06, |
|
"loss": 6.7406, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.525399129172714e-06, |
|
"loss": 6.8299, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.481857764876633e-06, |
|
"loss": 6.7729, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.4383164005805515e-06, |
|
"loss": 6.7405, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.3947750362844705e-06, |
|
"loss": 6.6303, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.351233671988389e-06, |
|
"loss": 6.8663, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.307692307692308e-06, |
|
"loss": 6.7374, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.2641509433962262e-06, |
|
"loss": 6.7383, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.2206095791001452e-06, |
|
"loss": 6.7889, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.177068214804064e-06, |
|
"loss": 6.5223, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1335268505079828e-06, |
|
"loss": 6.871, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.0899854862119014e-06, |
|
"loss": 6.5354, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.0464441219158204e-06, |
|
"loss": 6.3837, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 2.0029027576197385e-06, |
|
"loss": 6.6686, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.9593613933236575e-06, |
|
"loss": 6.4184, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.915820029027576e-06, |
|
"loss": 6.8471, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.872278664731495e-06, |
|
"loss": 6.7201, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.8287373004354138e-06, |
|
"loss": 6.9244, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7851959361393324e-06, |
|
"loss": 6.7686, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7416545718432512e-06, |
|
"loss": 6.8244, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.69811320754717e-06, |
|
"loss": 6.9997, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6545718432510886e-06, |
|
"loss": 6.7436, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6110304789550073e-06, |
|
"loss": 6.7031, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5674891146589261e-06, |
|
"loss": 6.7828, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5239477503628447e-06, |
|
"loss": 6.8203, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4804063860667635e-06, |
|
"loss": 6.9727, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4368650217706823e-06, |
|
"loss": 6.8576, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3933236574746008e-06, |
|
"loss": 6.934, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3497822931785196e-06, |
|
"loss": 6.7721, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3062409288824384e-06, |
|
"loss": 6.7792, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.262699564586357e-06, |
|
"loss": 6.9853, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2191582002902758e-06, |
|
"loss": 6.6124, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.1756168359941945e-06, |
|
"loss": 6.7884, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.1320754716981131e-06, |
|
"loss": 6.7028, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.088534107402032e-06, |
|
"loss": 6.6828, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0449927431059507e-06, |
|
"loss": 6.7638, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0014513788098693e-06, |
|
"loss": 6.66, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.57910014513788e-07, |
|
"loss": 6.8669, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.143686502177069e-07, |
|
"loss": 6.8665, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.708272859216256e-07, |
|
"loss": 6.6172, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.272859216255443e-07, |
|
"loss": 6.5918, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.837445573294631e-07, |
|
"loss": 6.7592, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.402031930333817e-07, |
|
"loss": 6.8729, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.966618287373004e-07, |
|
"loss": 6.5352, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.531204644412192e-07, |
|
"loss": 6.9544, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.095791001451379e-07, |
|
"loss": 6.6757, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.660377358490566e-07, |
|
"loss": 6.8157, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.224963715529753e-07, |
|
"loss": 6.7616, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.78955007256894e-07, |
|
"loss": 6.681, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.354136429608128e-07, |
|
"loss": 6.6513, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.9187227866473153e-07, |
|
"loss": 6.5563, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.483309143686502e-07, |
|
"loss": 6.5301, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.0478955007256894e-07, |
|
"loss": 6.3966, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.6124818577648767e-07, |
|
"loss": 6.4156, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1189, |
|
"total_flos": 0.0, |
|
"train_loss": 6.949796647760826, |
|
"train_runtime": 6133.846, |
|
"train_samples_per_second": 4.653, |
|
"train_steps_per_second": 0.194 |
|
} |
|
], |
|
"max_steps": 1189, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|