{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 1784,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.1616, "step": 1},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.4498, "step": 2},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.2391, "step": 3},
    {"epoch": 0.0, "learning_rate": 0.0, "loss": 9.3924, "step": 4},
    {"epoch": 0.0, "learning_rate": 2.0000000000000002e-07, "loss": 8.6141, "step": 5},
    {"epoch": 0.0, "learning_rate": 4.0000000000000003e-07, "loss": 8.8045, "step": 6},
    {"epoch": 0.0, "learning_rate": 6.000000000000001e-07, "loss": 8.6125, "step": 7},
    {"epoch": 0.0, "learning_rate": 8.000000000000001e-07, "loss": 8.7679, "step": 8},
    {"epoch": 0.01, "learning_rate": 8.000000000000001e-07, "loss": 9.4533, "step": 9},
    {"epoch": 0.01, "learning_rate": 1.0000000000000002e-06, "loss": 8.8176, "step": 10},
    {"epoch": 0.01, "learning_rate": 1.2000000000000002e-06, "loss": 8.4298, "step": 11},
    {"epoch": 0.01, "learning_rate": 1.4000000000000001e-06, "loss": 9.173, "step": 12},
    {"epoch": 0.01, "learning_rate": 1.6000000000000001e-06, "loss": 8.6383, "step": 13},
    {"epoch": 0.01, "learning_rate": 1.8e-06, "loss": 8.5711, "step": 14},
    {"epoch": 0.01, "learning_rate": 2.0000000000000003e-06, "loss": 8.3087, "step": 15},
    {"epoch": 0.01, "learning_rate": 2.2e-06, "loss": 8.0666, "step": 16},
    {"epoch": 0.01, "learning_rate": 2.2e-06, "loss": 8.0747, "step": 17},
    {"epoch": 0.01, "learning_rate": 2.2e-06, "loss": 8.0492, "step": 18},
    {"epoch": 0.01, "learning_rate": 2.4000000000000003e-06, "loss": 8.2339, "step": 19},
    {"epoch": 0.01, "learning_rate": 2.6e-06, "loss": 8.2345, "step": 20},
    {"epoch": 0.01, "learning_rate": 2.8000000000000003e-06, "loss": 8.041, "step": 21},
    {"epoch": 0.01, "learning_rate": 3e-06, "loss": 8.0229, "step": 22},
    {"epoch": 0.01, "learning_rate": 3.2000000000000003e-06, "loss": 8.4345, "step": 23},
    {"epoch": 0.01, "learning_rate": 3.4000000000000005e-06, "loss": 8.2216, "step": 24},
    {"epoch": 0.01, "learning_rate": 3.6e-06, "loss": 8.0237, "step": 25},
    {"epoch": 0.01, "learning_rate": 3.8e-06, "loss": 8.4366, "step": 26},
    {"epoch": 0.02, "learning_rate": 4.000000000000001e-06, "loss": 7.9684, "step": 27},
    {"epoch": 0.02, "learning_rate": 4.2000000000000004e-06, "loss": 7.8959, "step": 28},
    {"epoch": 0.02, "learning_rate": 4.4e-06, "loss": 8.0038, "step": 29},
    {"epoch": 0.02, "learning_rate": 4.6e-06, "loss": 8.6603, "step": 30},
    {"epoch": 0.02, "learning_rate": 4.800000000000001e-06, "loss": 8.447, "step": 31},
    {"epoch": 0.02, "learning_rate": 5e-06, "loss": 7.8605, "step": 32},
    {"epoch": 0.02, "learning_rate": 5.2e-06, "loss": 7.7414, "step": 33},
    {"epoch": 0.02, "learning_rate": 5.4e-06, "loss": 7.6243, "step": 34},
    {"epoch": 0.02, "learning_rate": 5.600000000000001e-06, "loss": 7.6788, "step": 35},
    {"epoch": 0.02, "learning_rate": 5.8e-06, "loss": 7.6921, "step": 36},
    {"epoch": 0.02, "learning_rate": 6e-06, "loss": 7.4842, "step": 37},
    {"epoch": 0.02, "learning_rate": 6.2e-06, "loss": 7.6418, "step": 38},
    {"epoch": 0.02, "learning_rate": 6.4000000000000006e-06, "loss": 7.8088, "step": 39},
    {"epoch": 0.02, "learning_rate": 6.6e-06, "loss": 7.5394, "step": 40},
    {"epoch": 0.02, "learning_rate": 6.800000000000001e-06, "loss": 7.5118, "step": 41},
    {"epoch": 0.02, "learning_rate": 7.000000000000001e-06, "loss": 7.4209, "step": 42},
    {"epoch": 0.02, "learning_rate": 7.2e-06, "loss": 7.6998, "step": 43},
    {"epoch": 0.02, "learning_rate": 7.4e-06, "loss": 7.4258, "step": 44},
    {"epoch": 0.03, "learning_rate": 7.6e-06, "loss": 7.7456, "step": 45},
    {"epoch": 0.03, "learning_rate": 7.8e-06, "loss": 7.5286, "step": 46},
    {"epoch": 0.03, "learning_rate": 8.000000000000001e-06, "loss": 7.4229, "step": 47},
    {"epoch": 0.03, "learning_rate": 8.200000000000001e-06, "loss": 7.7719, "step": 48},
    {"epoch": 0.03, "learning_rate": 8.400000000000001e-06, "loss": 7.5256, "step": 49},
    {"epoch": 0.03, "learning_rate": 8.599999999999999e-06, "loss": 7.7009, "step": 50},
    {"epoch": 0.03, "learning_rate": 8.8e-06, "loss": 7.3338, "step": 51},
    {"epoch": 0.03, "learning_rate": 9e-06, "loss": 7.4398, "step": 52},
    {"epoch": 0.03, "learning_rate": 9.2e-06, "loss": 7.2253, "step": 53},
    {"epoch": 0.03, "learning_rate": 9.4e-06, "loss": 7.3027, "step": 54},
    {"epoch": 0.03, "learning_rate": 9.600000000000001e-06, "loss": 7.5319, "step": 55},
    {"epoch": 0.03, "learning_rate": 9.800000000000001e-06, "loss": 7.2412, "step": 56},
    {"epoch": 0.03, "learning_rate": 1e-05, "loss": 7.1937, "step": 57},
    {"epoch": 0.03, "learning_rate": 1.02e-05, "loss": 7.1008, "step": 58},
    {"epoch": 0.03, "learning_rate": 1.04e-05, "loss": 7.1326, "step": 59},
    {"epoch": 0.03, "learning_rate": 1.06e-05, "loss": 7.167, "step": 60},
    {"epoch": 0.03, "learning_rate": 1.08e-05, "loss": 7.301, "step": 61},
    {"epoch": 0.03, "learning_rate": 1.1000000000000001e-05, "loss": 7.2863, "step": 62},
    {"epoch": 0.04, "learning_rate": 1.1200000000000001e-05, "loss": 7.3227, "step": 63},
    {"epoch": 0.04, "learning_rate": 1.1400000000000001e-05, "loss": 7.2021, "step": 64},
    {"epoch": 0.04, "learning_rate": 1.16e-05, "loss": 7.1497, "step": 65},
    {"epoch": 0.04, "learning_rate": 1.18e-05, "loss": 7.1492, "step": 66},
    {"epoch": 0.04, "learning_rate": 1.2e-05, "loss": 7.1561, "step": 67},
    {"epoch": 0.04, "learning_rate": 1.22e-05, "loss": 7.1566, "step": 68},
    {"epoch": 0.04, "learning_rate": 1.24e-05, "loss": 7.232, "step": 69},
    {"epoch": 0.04, "learning_rate": 1.2600000000000001e-05, "loss": 7.0286, "step": 70},
    {"epoch": 0.04, "learning_rate": 1.2800000000000001e-05, "loss": 7.1423, "step": 71},
    {"epoch": 0.04, "learning_rate": 1.3000000000000001e-05, "loss": 7.1052, "step": 72},
    {"epoch": 0.04, "learning_rate": 1.32e-05, "loss": 7.1983, "step": 73},
    {"epoch": 0.04, "learning_rate": 1.3400000000000002e-05, "loss": 7.0521, "step": 74},
    {"epoch": 0.04, "learning_rate": 1.3600000000000002e-05, "loss": 7.3371, "step": 75},
    {"epoch": 0.04, "learning_rate": 1.3800000000000002e-05, "loss": 7.1316, "step": 76},
    {"epoch": 0.04, "learning_rate": 1.4000000000000001e-05, "loss": 7.2627, "step": 77},
    {"epoch": 0.04, "learning_rate": 1.42e-05, "loss": 7.0736, "step": 78},
    {"epoch": 0.04, "learning_rate": 1.44e-05, "loss": 7.0296, "step": 79},
    {"epoch": 0.04, "learning_rate": 1.4599999999999999e-05, "loss": 7.0002, "step": 80},
    {"epoch": 0.05, "learning_rate": 1.48e-05, "loss": 7.1856, "step": 81},
    {"epoch": 0.05, "learning_rate": 1.5e-05, "loss": 7.2838, "step": 82},
    {"epoch": 0.05, "learning_rate": 1.52e-05, "loss": 6.9796, "step": 83},
    {"epoch": 0.05, "learning_rate": 1.54e-05, "loss": 7.0743, "step": 84},
    {"epoch": 0.05, "learning_rate": 1.56e-05, "loss": 7.1287, "step": 85},
    {"epoch": 0.05, "learning_rate": 1.58e-05, "loss": 7.1552, "step": 86},
    {"epoch": 0.05, "learning_rate": 1.6000000000000003e-05, "loss": 7.2126, "step": 87},
    {"epoch": 0.05, "learning_rate": 1.62e-05, "loss": 7.1017, "step": 88},
    {"epoch": 0.05, "learning_rate": 1.6400000000000002e-05, "loss": 7.1621, "step": 89},
    {"epoch": 0.05, "learning_rate": 1.66e-05, "loss": 6.9273, "step": 90},
    {"epoch": 0.05, "learning_rate": 1.6800000000000002e-05, "loss": 7.2059, "step": 91},
    {"epoch": 0.05, "learning_rate": 1.7000000000000003e-05, "loss": 6.8581, "step": 92},
    {"epoch": 0.05, "learning_rate": 1.7199999999999998e-05, "loss": 6.9326, "step": 93},
    {"epoch": 0.05, "learning_rate": 1.74e-05, "loss": 7.2143, "step": 94},
    {"epoch": 0.05, "learning_rate": 1.76e-05, "loss": 7.1836, "step": 95},
    {"epoch": 0.05, "learning_rate": 1.78e-05, "loss": 6.9605, "step": 96},
    {"epoch": 0.05, "learning_rate": 1.8e-05, "loss": 6.8981, "step": 97},
    {"epoch": 0.05, "learning_rate": 1.8200000000000002e-05, "loss": 6.6154, "step": 98},
    {"epoch": 0.06, "learning_rate": 1.84e-05, "loss": 6.9042, "step": 99},
    {"epoch": 0.06, "learning_rate": 1.86e-05, "loss": 6.7094, "step": 100},
    {"epoch": 0.06, "learning_rate": 1.88e-05, "loss": 7.3051, "step": 101},
    {"epoch": 0.06, "learning_rate": 1.9e-05, "loss": 7.2407, "step": 102},
    {"epoch": 0.06, "learning_rate": 1.9200000000000003e-05, "loss": 7.2231, "step": 103},
    {"epoch": 0.06, "learning_rate": 1.94e-05, "loss": 7.1201, "step": 104},
    {"epoch": 0.06, "learning_rate": 1.9600000000000002e-05, "loss": 7.0411, "step": 105},
    {"epoch": 0.06, "learning_rate": 1.9800000000000004e-05, "loss": 6.9956, "step": 106},
    {"epoch": 0.06, "learning_rate": 2e-05, "loss": 7.4416, "step": 107},
    {"epoch": 0.06, "learning_rate": 2.0200000000000003e-05, "loss": 7.1068, "step": 108},
    {"epoch": 0.06, "learning_rate": 2.04e-05, "loss": 6.9827, "step": 109},
    {"epoch": 0.06, "learning_rate": 2.06e-05, "loss": 7.1509, "step": 110},
    {"epoch": 0.06, "learning_rate": 2.08e-05, "loss": 6.9555, "step": 111},
    {"epoch": 0.06, "learning_rate": 2.1e-05, "loss": 6.8866, "step": 112},
    {"epoch": 0.06, "learning_rate": 2.12e-05, "loss": 7.1114, "step": 113},
    {"epoch": 0.06, "learning_rate": 2.1400000000000002e-05, "loss": 7.1804, "step": 114},
    {"epoch": 0.06, "learning_rate": 2.16e-05, "loss": 7.0321, "step": 115},
    {"epoch": 0.07, "learning_rate": 2.18e-05, "loss": 7.1007, "step": 116},
    {"epoch": 0.07, "learning_rate": 2.2000000000000003e-05, "loss": 7.1832, "step": 117},
    {"epoch": 0.07, "learning_rate": 2.22e-05, "loss": 6.944, "step": 118},
    {"epoch": 0.07, "learning_rate": 2.2400000000000002e-05, "loss": 7.041, "step": 119},
    {"epoch": 0.07, "learning_rate": 2.26e-05, "loss": 7.0734, "step": 120},
    {"epoch": 0.07, "learning_rate": 2.2800000000000002e-05, "loss": 7.2399, "step": 121},
    {"epoch": 0.07, "learning_rate": 2.3000000000000003e-05, "loss": 7.1685, "step": 122},
    {"epoch": 0.07, "learning_rate": 2.32e-05, "loss": 7.1381, "step": 123},
    {"epoch": 0.07, "learning_rate": 2.3400000000000003e-05, "loss": 7.1588, "step": 124},
    {"epoch": 0.07, "learning_rate": 2.36e-05, "loss": 6.83, "step": 125},
    {"epoch": 0.07, "learning_rate": 2.38e-05, "loss": 7.3076, "step": 126},
    {"epoch": 0.07, "learning_rate": 2.4e-05, "loss": 7.0183, "step": 127},
    {"epoch": 0.07, "learning_rate": 2.4200000000000002e-05, "loss": 7.0584, "step": 128},
    {"epoch": 0.07, "learning_rate": 2.44e-05, "loss": 6.9407, "step": 129},
    {"epoch": 0.07, "learning_rate": 2.46e-05, "loss": 6.9684, "step": 130},
    {"epoch": 0.07, "learning_rate": 2.48e-05, "loss": 7.2447, "step": 131},
    {"epoch": 0.07, "learning_rate": 2.5e-05, "loss": 7.0475, "step": 132},
    {"epoch": 0.07, "learning_rate": 2.5200000000000003e-05, "loss": 7.1777, "step": 133},
    {"epoch": 0.08, "learning_rate": 2.54e-05, "loss": 6.975, "step": 134},
    {"epoch": 0.08, "learning_rate": 2.5600000000000002e-05, "loss": 6.9606, "step": 135},
    {"epoch": 0.08, "learning_rate": 2.58e-05, "loss": 7.212, "step": 136},
    {"epoch": 0.08, "learning_rate": 2.6000000000000002e-05, "loss": 7.0825, "step": 137},
    {"epoch": 0.08, "learning_rate": 2.6200000000000003e-05, "loss": 7.0332, "step": 138},
    {"epoch": 0.08, "learning_rate": 2.64e-05, "loss": 7.1766, "step": 139},
    {"epoch": 0.08, "learning_rate": 2.6600000000000003e-05, "loss": 7.1125, "step": 140},
    {"epoch": 0.08, "learning_rate": 2.6800000000000004e-05, "loss": 7.29, "step": 141},
    {"epoch": 0.08, "learning_rate": 2.7000000000000002e-05, "loss": 7.23, "step": 142},
    {"epoch": 0.08, "learning_rate": 2.7200000000000004e-05, "loss": 6.9397, "step": 143},
    {"epoch": 0.08, "learning_rate": 2.7400000000000002e-05, "loss": 7.0627, "step": 144},
    {"epoch": 0.08, "learning_rate": 2.7600000000000003e-05, "loss": 7.189, "step": 145},
    {"epoch": 0.08, "learning_rate": 2.7800000000000005e-05, "loss": 7.0977, "step": 146},
    {"epoch": 0.08, "learning_rate": 2.8000000000000003e-05, "loss": 7.0848, "step": 147},
    {"epoch": 0.08, "learning_rate": 2.8199999999999998e-05, "loss": 7.0181, "step": 148},
    {"epoch": 0.08, "learning_rate": 2.84e-05, "loss": 7.0304, "step": 149},
    {"epoch": 0.08, "learning_rate": 2.86e-05, "loss": 6.2391, "step": 150},
    {"epoch": 0.08, "learning_rate": 2.88e-05, "loss": 7.0137, "step": 151},
    {"epoch": 0.09, "learning_rate": 2.9e-05, "loss": 7.0504, "step": 152},
    {"epoch": 0.09, "learning_rate": 2.9199999999999998e-05, "loss": 6.9482, "step": 153},
    {"epoch": 0.09, "learning_rate": 2.94e-05, "loss": 7.2289, "step": 154},
    {"epoch": 0.09, "learning_rate": 2.96e-05, "loss": 6.8602, "step": 155},
    {"epoch": 0.09, "learning_rate": 2.98e-05, "loss": 7.1883, "step": 156},
    {"epoch": 0.09, "learning_rate": 3e-05, "loss": 7.0202, "step": 157},
    {"epoch": 0.09, "learning_rate": 3.02e-05, "loss": 7.017, "step": 158},
    {"epoch": 0.09, "learning_rate": 3.04e-05, "loss": 7.0493, "step": 159},
    {"epoch": 0.09, "learning_rate": 3.06e-05, "loss": 7.1727, "step": 160},
    {"epoch": 0.09, "learning_rate": 3.08e-05, "loss": 7.1512, "step": 161},
    {"epoch": 0.09, "learning_rate": 3.1e-05, "loss": 7.2892, "step": 162},
    {"epoch": 0.09, "learning_rate": 3.12e-05, "loss": 7.0769, "step": 163},
    {"epoch": 0.09, "learning_rate": 3.1400000000000004e-05, "loss": 7.0605, "step": 164},
    {"epoch": 0.09, "learning_rate": 3.16e-05, "loss": 7.0695, "step": 165},
    {"epoch": 0.09, "learning_rate": 3.18e-05, "loss": 7.1137, "step": 166},
    {"epoch": 0.09, "learning_rate": 3.2000000000000005e-05, "loss": 7.0851, "step": 167},
    {"epoch": 0.09, "learning_rate": 3.2200000000000003e-05, "loss": 6.8918, "step": 168},
    {"epoch": 0.09, "learning_rate": 3.24e-05, "loss": 6.8247, "step": 169},
    {"epoch": 0.1, "learning_rate": 3.26e-05, "loss": 7.1317, "step": 170},
    {"epoch": 0.1, "learning_rate": 3.2800000000000004e-05, "loss": 7.0571, "step": 171},
    {"epoch": 0.1, "learning_rate": 3.3e-05, "loss": 6.9825, "step": 172},
    {"epoch": 0.1, "learning_rate": 3.32e-05, "loss": 6.99, "step": 173},
    {"epoch": 0.1, "learning_rate": 3.3400000000000005e-05, "loss": 6.9346, "step": 174},
    {"epoch": 0.1, "learning_rate": 3.3600000000000004e-05, "loss": 7.0916, "step": 175},
    {"epoch": 0.1, "learning_rate": 3.38e-05, "loss": 6.8672, "step": 176},
    {"epoch": 0.1, "learning_rate": 3.4000000000000007e-05, "loss": 6.8406, "step": 177},
    {"epoch": 0.1, "learning_rate": 3.4200000000000005e-05, "loss": 6.9888, "step": 178},
    {"epoch": 0.1, "learning_rate": 3.4399999999999996e-05, "loss": 6.8488, "step": 179},
    {"epoch": 0.1, "learning_rate": 3.46e-05, "loss": 6.9961, "step": 180},
    {"epoch": 0.1, "learning_rate": 3.48e-05, "loss": 6.8721, "step": 181},
    {"epoch": 0.1, "learning_rate": 3.5e-05, "loss": 6.8678, "step": 182},
    {"epoch": 0.1, "learning_rate": 3.52e-05, "loss": 7.0781, "step": 183},
    {"epoch": 0.1, "learning_rate": 3.54e-05, "loss": 6.7809, "step": 184},
    {"epoch": 0.1, "learning_rate": 3.56e-05, "loss": 6.9891, "step": 185},
    {"epoch": 0.1, "learning_rate": 3.58e-05, "loss": 6.9271, "step": 186},
    {"epoch": 0.1, "learning_rate": 3.6e-05, "loss": 6.8859, "step": 187},
    {"epoch": 0.11, "learning_rate": 3.62e-05, "loss": 6.9126, "step": 188},
    {"epoch": 0.11, "learning_rate": 3.6400000000000004e-05, "loss": 6.8977, "step": 189},
    {"epoch": 0.11, "learning_rate": 3.66e-05, "loss": 6.8927, "step": 190},
    {"epoch": 0.11, "learning_rate": 3.68e-05, "loss": 7.0939, "step": 191},
    {"epoch": 0.11, "learning_rate": 3.7e-05, "loss": 6.7854, "step": 192},
    {"epoch": 0.11, "learning_rate": 3.72e-05, "loss": 6.8186, "step": 193},
    {"epoch": 0.11, "learning_rate": 3.74e-05, "loss": 6.9242, "step": 194},
    {"epoch": 0.11, "learning_rate": 3.76e-05, "loss": 6.9644, "step": 195},
    {"epoch": 0.11, "learning_rate": 3.7800000000000004e-05, "loss": 6.9289, "step": 196},
    {"epoch": 0.11, "learning_rate": 3.8e-05, "loss": 6.8825, "step": 197},
    {"epoch": 0.11, "learning_rate": 3.82e-05, "loss": 6.6243, "step": 198},
    {"epoch": 0.11, "learning_rate": 3.8400000000000005e-05, "loss": 6.6316, "step": 199},
    {"epoch": 0.11, "learning_rate": 3.86e-05, "loss": 6.3754, "step": 200},
    {"epoch": 0.11, "learning_rate": 3.88e-05, "loss": 7.1614, "step": 201},
    {"epoch": 0.11, "learning_rate": 3.9000000000000006e-05, "loss": 6.9473, "step": 202},
    {"epoch": 0.11, "learning_rate": 3.9200000000000004e-05, "loss": 7.3062, "step": 203},
    {"epoch": 0.11, "learning_rate": 3.94e-05, "loss": 6.8621, "step": 204},
    {"epoch": 0.11, "learning_rate": 3.960000000000001e-05, "loss": 6.8983, "step": 205},
    {"epoch": 0.12, "learning_rate": 3.9800000000000005e-05, "loss": 7.0148, "step": 206},
    {"epoch": 0.12, "learning_rate": 4e-05, "loss": 7.0128, "step": 207},
    {"epoch": 0.12, "learning_rate": 4.02e-05, "loss": 6.8904, "step": 208},
    {"epoch": 0.12, "learning_rate": 4.0400000000000006e-05, "loss": 6.9903, "step": 209},
    {"epoch": 0.12, "learning_rate": 4.0600000000000004e-05, "loss": 7.145, "step": 210},
    {"epoch": 0.12, "learning_rate": 4.08e-05, "loss": 6.9505, "step": 211},
    {"epoch": 0.12, "learning_rate": 4.1e-05, "loss": 7.0709, "step": 212},
    {"epoch": 0.12, "learning_rate": 4.12e-05, "loss": 6.9791, "step": 213},
    {"epoch": 0.12, "learning_rate": 4.14e-05, "loss": 6.7219, "step": 214},
    {"epoch": 0.12, "learning_rate": 4.16e-05, "loss": 7.0825, "step": 215},
    {"epoch": 0.12, "learning_rate": 4.18e-05, "loss": 6.7478, "step": 216},
    {"epoch": 0.12, "learning_rate": 4.2e-05, "loss": 6.9912, "step": 217},
    {"epoch": 0.12, "learning_rate": 4.22e-05, "loss": 6.8563, "step": 218},
    {"epoch": 0.12, "learning_rate": 4.24e-05, "loss": 6.9062, "step": 219},
    {"epoch": 0.12, "learning_rate": 4.26e-05, "loss": 6.9776, "step": 220},
    {"epoch": 0.12, "learning_rate": 4.2800000000000004e-05, "loss": 7.0412, "step": 221},
    {"epoch": 0.12, "learning_rate": 4.3e-05, "loss": 7.1644, "step": 222},
    {"epoch": 0.12, "learning_rate": 4.32e-05, "loss": 6.8507, "step": 223},
    {"epoch": 0.13, "learning_rate": 4.3400000000000005e-05, "loss": 6.9205, "step": 224},
    {"epoch": 0.13, "learning_rate": 4.36e-05, "loss": 7.0125, "step": 225},
    {"epoch": 0.13, "learning_rate": 4.38e-05, "loss": 7.2351, "step": 226},
    {"epoch": 0.13, "learning_rate": 4.4000000000000006e-05, "loss": 7.188, "step": 227},
    {"epoch": 0.13, "learning_rate": 4.4200000000000004e-05, "loss": 7.0357, "step": 228},
    {"epoch": 0.13, "learning_rate": 4.44e-05, "loss": 6.863, "step": 229},
    {"epoch": 0.13, "learning_rate": 4.46e-05, "loss": 6.946, "step": 230},
    {"epoch": 0.13, "learning_rate": 4.4800000000000005e-05, "loss": 6.9235, "step": 231},
    {"epoch": 0.13, "learning_rate": 4.5e-05, "loss": 7.0466, "step": 232},
    {"epoch": 0.13, "learning_rate": 4.52e-05, "loss": 7.1308, "step": 233},
    {"epoch": 0.13, "learning_rate": 4.5400000000000006e-05, "loss": 6.9522, "step": 234},
    {"epoch": 0.13, "learning_rate": 4.5600000000000004e-05, "loss": 6.8172, "step": 235},
    {"epoch": 0.13, "learning_rate": 4.58e-05, "loss": 7.2164, "step": 236},
    {"epoch": 0.13, "learning_rate": 4.600000000000001e-05, "loss": 7.0156, "step": 237},
    {"epoch": 0.13, "learning_rate": 4.6200000000000005e-05, "loss": 6.9764, "step": 238},
    {"epoch": 0.13, "learning_rate": 4.64e-05, "loss": 6.9341, "step": 239},
    {"epoch": 0.13, "learning_rate": 4.660000000000001e-05, "loss": 6.9524, "step": 240},
    {"epoch": 0.14, "learning_rate": 4.6800000000000006e-05, "loss": 6.9046, "step": 241},
    {"epoch": 0.14, "learning_rate": 4.7e-05, "loss": 6.8651, "step": 242},
    {"epoch": 0.14, "learning_rate": 4.72e-05, "loss": 6.7141, "step": 243},
    {"epoch": 0.14, "learning_rate": 4.74e-05, "loss": 6.9299, "step": 244},
    {"epoch": 0.14, "learning_rate": 4.76e-05, "loss": 6.8534, "step": 245},
    {"epoch": 0.14, "learning_rate": 4.78e-05, "loss": 7.0233, "step": 246},
    {"epoch": 0.14, "learning_rate": 4.8e-05, "loss": 6.4266, "step": 247},
    {"epoch": 0.14, "learning_rate": 4.82e-05, "loss": 6.8092, "step": 248},
    {"epoch": 0.14, "learning_rate": 4.8400000000000004e-05, "loss": 6.4207, "step": 249},
    {"epoch": 0.14, "learning_rate": 4.86e-05, "loss": 6.3026, "step": 250},
    {"epoch": 0.14, "learning_rate": 4.88e-05, "loss": 7.0456, "step": 251},
    {"epoch": 0.14, "learning_rate": 4.9e-05, "loss": 7.3466, "step": 252},
    {"epoch": 0.14, "learning_rate": 4.92e-05, "loss": 7.0408, "step": 253},
    {"epoch": 0.14, "learning_rate": 4.94e-05, "loss": 7.1668, "step": 254},
    {"epoch": 0.14, "learning_rate": 4.96e-05, "loss": 7.0237, "step": 255},
    {"epoch": 0.14, "learning_rate": 4.9800000000000004e-05, "loss": 6.9889, "step": 256},
    {"epoch": 0.14, "learning_rate": 5e-05, "loss": 7.035, "step": 257},
    {"epoch": 0.14, "learning_rate": 5.02e-05, "loss": 6.9818, "step": 258},
    {"epoch": 0.15, "learning_rate": 5.0400000000000005e-05, "loss": 7.2504, "step": 259},
    {"epoch": 0.15, "learning_rate": 5.0600000000000003e-05, "loss": 6.6811, "step": 260},
    {"epoch": 0.15, "learning_rate": 5.08e-05, "loss": 6.835, "step": 261},
    {"epoch": 0.15, "learning_rate": 5.1000000000000006e-05, "loss": 6.8678, "step": 262},
    {"epoch": 0.15, "learning_rate": 5.1200000000000004e-05, "loss": 7.1576, "step": 263},
    {"epoch": 0.15, "learning_rate": 5.14e-05, "loss": 7.1024, "step": 264},
    {"epoch": 0.15, "learning_rate": 5.16e-05, "loss": 6.9567, "step": 265},
    {"epoch": 0.15, "learning_rate": 5.1800000000000005e-05, "loss": 6.8086, "step": 266},
    {"epoch": 0.15, "learning_rate": 5.2000000000000004e-05, "loss": 7.0584, "step": 267},
    {"epoch": 0.15, "learning_rate": 5.22e-05, "loss": 7.007, "step": 268},
    {"epoch": 0.15, "learning_rate": 5.2400000000000007e-05, "loss": 7.0871, "step": 269},
    {"epoch": 0.15, "learning_rate": 5.2600000000000005e-05, "loss": 6.8458, "step": 270},
    {"epoch": 0.15, "learning_rate": 5.28e-05, "loss": 6.8912, "step": 271},
    {"epoch": 0.15, "learning_rate": 5.300000000000001e-05, "loss": 6.9293, "step": 272},
    {"epoch": 0.15, "learning_rate": 5.3200000000000006e-05, "loss": 6.8123, "step": 273},
    {"epoch": 0.15, "learning_rate": 5.3400000000000004e-05, "loss": 6.9474, "step": 274},
    {"epoch": 0.15, "learning_rate": 5.360000000000001e-05, "loss": 7.0488, "step": 275},
    {"epoch": 0.15, "learning_rate": 5.380000000000001e-05, "loss": 7.1216, "step": 276},
    {"epoch": 0.16, "learning_rate": 5.4000000000000005e-05, "loss": 6.9446, "step": 277},
    {"epoch": 0.16, "learning_rate": 5.420000000000001e-05, "loss": 7.1616, "step": 278},
    {"epoch": 0.16, "learning_rate": 5.440000000000001e-05, "loss": 6.8666, "step": 279},
    {"epoch": 0.16, "learning_rate": 5.4600000000000006e-05, "loss": 6.8239, "step": 280},
    {"epoch": 0.16, "learning_rate": 5.4800000000000004e-05, "loss": 6.9709, "step": 281},
    {"epoch": 0.16, "learning_rate": 5.500000000000001e-05, "loss": 7.1768, "step": 282},
    {"epoch": 0.16, "learning_rate": 5.520000000000001e-05, "loss": 7.0812, "step": 283},
    {"epoch": 0.16, "learning_rate": 5.5400000000000005e-05, "loss": 6.682, "step": 284},
    {"epoch": 0.16, "learning_rate": 5.560000000000001e-05, "loss": 7.1171, "step": 285},
    {"epoch": 0.16, "learning_rate": 5.580000000000001e-05, "loss": 6.7977, "step": 286},
    {"epoch": 0.16, "learning_rate": 5.6000000000000006e-05, "loss": 6.9057, "step": 287},
    {"epoch": 0.16, "learning_rate": 5.620000000000001e-05, "loss": 7.089, "step": 288},
    {"epoch": 0.16, "learning_rate": 5.6399999999999995e-05, "loss": 6.7968, "step": 289},
    {"epoch": 0.16, "learning_rate": 5.66e-05, "loss": 7.0452, "step": 290},
    {"epoch": 0.16, "learning_rate": 5.68e-05, "loss": 7.0024, "step": 291},
    {"epoch": 0.16, "learning_rate": 5.6999999999999996e-05, "loss": 7.1462, "step": 292},
    {"epoch": 0.16, "learning_rate": 5.72e-05, "loss": 6.9404, "step": 293},
    {"epoch": 0.16, "learning_rate": 5.74e-05, "loss": 6.9575, "step": 294},
    {"epoch": 0.17, "learning_rate": 5.76e-05, "loss": 6.7984, "step": 295},
    {"epoch": 0.17, "learning_rate": 5.7799999999999995e-05, "loss": 6.6834, "step": 296},
    {"epoch": 0.17, "learning_rate": 5.8e-05, "loss": 6.7387, "step": 297},
    {"epoch": 0.17, "learning_rate": 5.82e-05, "loss": 6.9219, "step": 298},
    {"epoch": 0.17, "learning_rate": 5.8399999999999997e-05, "loss": 6.7042, "step": 299},
    {"epoch": 0.17, "learning_rate": 5.86e-05, "loss": 6.2036, "step": 300},
    {"epoch": 0.17, "learning_rate": 5.88e-05, "loss": 6.9522, "step": 301},
    {"epoch": 0.17, "learning_rate": 5.9e-05, "loss": 6.924, "step": 302},
    {"epoch": 0.17, "learning_rate": 5.92e-05, "loss": 6.9845, "step": 303},
    {"epoch": 0.17, "learning_rate": 5.94e-05, "loss": 6.8166, "step": 304},
    {"epoch": 0.17, "learning_rate": 5.96e-05, "loss": 6.9723, "step": 305},
    {"epoch": 0.17, "learning_rate": 5.9800000000000003e-05, "loss": 6.9201, "step": 306},
    {"epoch": 0.17, "learning_rate": 6e-05, "loss": 7.0092, "step": 307},
    {"epoch": 0.17, "learning_rate": 6.02e-05, "loss": 7.1132, "step": 308},
    {"epoch": 0.17, "learning_rate": 6.04e-05, "loss": 6.9393, "step": 309},
    {"epoch": 0.17, "learning_rate": 6.06e-05, "loss": 7.0828, "step": 310},
    {"epoch": 0.17, "learning_rate": 6.08e-05, "loss": 6.8783, "step": 311},
    {"epoch": 0.17, "learning_rate": 6.1e-05, "loss": 7.0103, "step": 312},
    {"epoch": 0.18, "learning_rate": 6.12e-05, "loss": 6.9497, "step": 313},
    {"epoch": 0.18, "learning_rate": 6.14e-05, "loss": 6.9917, "step": 314},
    {"epoch": 0.18, "learning_rate": 6.16e-05, "loss": 7.0308, "step": 315},
    {"epoch": 0.18, "learning_rate": 6.18e-05, "loss": 6.8281, "step": 316},
    {"epoch": 0.18, "learning_rate": 6.2e-05, "loss": 6.8573, "step": 317},
    {"epoch": 0.18, "learning_rate": 6.220000000000001e-05, "loss": 6.8823, "step": 318},
    {"epoch": 0.18, "learning_rate": 6.24e-05, "loss": 7.0293, "step": 319},
    {"epoch": 0.18, "learning_rate": 6.26e-05, "loss": 7.0423, "step": 320},
    {"epoch": 0.18, "learning_rate": 6.280000000000001e-05, "loss": 7.2528, "step": 321},
    {"epoch": 0.18, "learning_rate": 6.3e-05, "loss": 7.1092, "step": 322},
    {"epoch": 0.18, "learning_rate": 6.32e-05, "loss": 7.2063, "step": 323},
    {"epoch": 0.18, "learning_rate": 6.340000000000001e-05, "loss": 6.8959, "step": 324},
    {"epoch": 0.18, "learning_rate": 6.36e-05, "loss": 6.8466, "step": 325},
    {"epoch": 0.18, "learning_rate": 6.38e-05, "loss": 7.0297, "step": 326},
    {"epoch": 0.18, "learning_rate": 6.400000000000001e-05, "loss": 6.9622, "step": 327},
    {"epoch": 0.18, "learning_rate": 6.42e-05, "loss": 6.9063, "step": 328},
    {"epoch": 0.18, "learning_rate": 6.440000000000001e-05, "loss": 6.8759, "step": 329},
    {"epoch": 0.18, "learning_rate": 6.460000000000001e-05, "loss": 7.0322, "step": 330},
    {"epoch": 0.19, "learning_rate": 6.48e-05, "loss": 6.9781, "step": 331},
    {"epoch": 0.19, "learning_rate": 6.500000000000001e-05, "loss": 7.0036, "step": 332},
    {"epoch": 0.19, "learning_rate": 6.52e-05, "loss": 7.0864, "step": 333},
    {"epoch": 0.19, "learning_rate": 6.54e-05, "loss": 7.1104, "step": 334},
    {"epoch": 0.19, "learning_rate": 6.560000000000001e-05, "loss": 6.9095, "step": 335},
    {"epoch": 0.19, "learning_rate": 6.58e-05, "loss": 6.9461, "step": 336},
    {"epoch": 0.19, "learning_rate": 6.6e-05, "loss": 6.9987, "step": 337},
    {"epoch": 0.19, "learning_rate": 6.620000000000001e-05, "loss": 6.7974, "step": 338},
    {"epoch": 0.19, "learning_rate": 6.64e-05, "loss": 6.9084, "step": 339},
    {"epoch": 0.19, "learning_rate": 6.66e-05, "loss": 6.988, "step": 340},
    {"epoch": 0.19, "learning_rate": 6.680000000000001e-05, "loss": 6.7049, "step": 341},
    {"epoch": 0.19, "learning_rate": 6.7e-05, "loss": 7.0854, "step": 342},
    {"epoch": 0.19, "learning_rate": 6.720000000000001e-05, "loss": 7.05, "step": 343},
    {"epoch": 0.19, "learning_rate": 6.740000000000001e-05, "loss": 6.9691, "step": 344},
    {"epoch": 0.19, "learning_rate": 6.76e-05, "loss": 6.7041, "step": 345},
    {"epoch": 0.19, "learning_rate": 6.780000000000001e-05, "loss": 6.7627, "step": 346},
    {"epoch": 0.19, "learning_rate": 6.800000000000001e-05, "loss": 7.3683, "step": 347},
    {"epoch": 0.2, "learning_rate": 6.82e-05, "loss": 6.7652, "step": 348},
    {"epoch": 0.2, "learning_rate": 6.840000000000001e-05, "loss": 6.334, "step": 349},
    {"epoch": 0.2, "learning_rate": 6.860000000000001e-05, "loss": 6.0494, "step": 350},
    {"epoch": 0.2, "learning_rate": 6.879999999999999e-05, "loss": 7.0525, "step": 351},
    {"epoch": 0.2, "learning_rate": 6.9e-05, "loss": 7.1171, "step": 352},
    {"epoch": 0.2, "learning_rate": 6.92e-05, "loss": 6.9668, "step": 353},
    {"epoch": 0.2, "learning_rate": 6.939999999999999e-05, "loss": 6.9117, "step": 354},
    {"epoch": 0.2, "learning_rate": 6.96e-05, "loss": 6.835, "step": 355},
    {"epoch": 0.2, "learning_rate": 6.98e-05, "loss": 7.0222, "step": 356},
    {"epoch": 0.2, "learning_rate": 7e-05, "loss": 7.1445, "step": 357},
    {"epoch": 0.2, "learning_rate": 7.02e-05, "loss": 6.9976, "step": 358},
    {"epoch": 0.2, "learning_rate": 7.04e-05, "loss": 7.1503, "step": 359},
    {"epoch": 0.2, "learning_rate": 7.06e-05, "loss": 7.0396, "step": 360},
    {"epoch": 0.2, "learning_rate": 7.08e-05, "loss": 6.7691, "step": 361},
    {"epoch": 0.2, "learning_rate": 7.1e-05, "loss": 6.6886, "step": 362},
    {"epoch": 0.2, "learning_rate": 7.12e-05, "loss": 6.8942, "step": 363},
    {"epoch": 0.2, "learning_rate": 7.14e-05, "loss": 6.8019, "step": 364},
    {"epoch": 0.2, "learning_rate": 7.16e-05, "loss": 6.9332, "step": 365},
    {"epoch": 0.21, "learning_rate": 7.18e-05, "loss": 7.0608, "step": 366},
    {"epoch": 0.21, "learning_rate": 7.2e-05, "loss": 6.8797, "step": 367},
    {"epoch": 0.21, "learning_rate": 7.22e-05, "loss": 6.8975, "step": 368},
    {"epoch": 0.21, "learning_rate": 7.24e-05, "loss": 7.0204, "step": 369},
    {"epoch": 0.21, "learning_rate": 7.26e-05, "loss": 6.8734, "step": 370},
    {"epoch": 0.21, "learning_rate": 7.280000000000001e-05, "loss": 6.8151, "step": 371},
    {"epoch": 0.21, "learning_rate": 7.3e-05, "loss": 7.1294, "step": 372},
    {"epoch": 0.21, "learning_rate": 7.32e-05, "loss": 6.9473, "step": 373},
    {"epoch": 0.21, "learning_rate": 7.340000000000001e-05, "loss": 6.8825, "step": 374},
    {"epoch": 0.21, "learning_rate": 7.36e-05, "loss": 6.7154, "step": 375},
    {"epoch": 0.21, "learning_rate": 7.38e-05, "loss": 6.9182, "step": 376},
    {"epoch": 0.21, "learning_rate": 7.4e-05, "loss": 6.9023, "step": 377},
    {"epoch": 0.21, "learning_rate": 7.42e-05, "loss": 6.8441, "step": 378},
    {"epoch": 0.21, "learning_rate": 7.44e-05, "loss": 6.9761, "step": 379},
    {"epoch": 0.21, "learning_rate": 7.46e-05, "loss": 7.0962, "step": 380},
    {"epoch": 0.21, "learning_rate": 7.48e-05, "loss": 6.8442, "step": 381},
    {"epoch": 0.21, "learning_rate": 7.500000000000001e-05, "loss": 6.7706, "step": 382},
    {"epoch": 0.21, "learning_rate": 7.52e-05, "loss": 6.8681, "step": 383},
    {"epoch": 0.22, "learning_rate": 7.54e-05, "loss": 6.9452, "step": 384},
    {"epoch": 0.22, "learning_rate": 7.560000000000001e-05, "loss": 7.0829, "step": 385},
    {"epoch": 0.22, "learning_rate": 7.58e-05, "loss": 7.2026, "step": 386},
    {"epoch": 0.22, "learning_rate": 7.6e-05, "loss": 6.9318, "step": 387},
    {"epoch": 0.22, "learning_rate": 7.620000000000001e-05, "loss": 6.7755, "step": 388},
    {"epoch": 0.22, "learning_rate": 7.64e-05, "loss": 6.8436, "step": 389},
    {"epoch": 0.22, "learning_rate": 7.66e-05, "loss": 7.004, "step": 390},
    {"epoch": 0.22, "learning_rate": 7.680000000000001e-05, "loss": 6.8093, "step": 391},
    {"epoch": 0.22, "learning_rate": 7.7e-05, "loss": 7.4031, "step": 392},
    {"epoch": 0.22, "learning_rate": 7.72e-05, "loss": 6.9762, "step": 393},
    {"epoch": 0.22, "learning_rate": 7.740000000000001e-05, "loss": 6.7874, "step": 394},
    {"epoch": 0.22, "learning_rate": 7.76e-05, "loss": 6.8192, "step": 395},
    {"epoch": 0.22, "learning_rate": 7.780000000000001e-05, "loss": 6.6398, "step": 396},
    {"epoch": 0.22, "learning_rate": 7.800000000000001e-05, "loss": 6.5132, "step": 397},
    {"epoch": 0.22, "learning_rate": 7.82e-05, "loss": 6.8268, "step": 398},
    {"epoch": 0.22, "learning_rate": 7.840000000000001e-05, "loss": 6.8986, "step": 399},
    {"epoch": 0.22, "learning_rate": 7.860000000000001e-05, "loss": 6.5213, "step": 400},
    {"epoch": 0.22, "learning_rate": 7.88e-05, "loss": 7.1412, "step": 401},
    {"epoch": 0.23, "learning_rate": 7.900000000000001e-05, "loss": 6.9053, "step": 402},
    {"epoch": 0.23, "learning_rate": 7.920000000000001e-05, "loss": 6.8119, "step": 403},
    {"epoch": 0.23, "learning_rate": 7.94e-05, "loss": 7.1109, "step": 404},
    {"epoch": 0.23, "learning_rate": 7.960000000000001e-05, "loss": 6.8769, "step": 405},
    {"epoch": 0.23, "learning_rate": 7.98e-05, "loss": 7.0447, "step": 406},
    {"epoch": 0.23, "learning_rate": 8e-05, "loss": 6.8792, "step": 407},
    {"epoch": 0.23, "learning_rate": 8.020000000000001e-05, "loss": 6.9365, "step": 408},
    {"epoch": 0.23, "learning_rate": 8.04e-05, "loss": 6.8985, "step": 409},
    {"epoch": 0.23, "learning_rate": 8.060000000000001e-05, "loss": 6.8336, "step": 410},
    {"epoch": 0.23, "learning_rate": 8.080000000000001e-05, "loss": 6.8815, "step": 411},
    {"epoch": 0.23, "learning_rate": 8.1e-05, "loss": 6.7618, "step": 412},
    {"epoch": 0.23, "learning_rate": 8.120000000000001e-05, "loss": 7.0093, "step": 413},
    {"epoch": 0.23, "learning_rate": 8.14e-05, "loss": 6.7996, "step": 414},
    {"epoch": 0.23, "learning_rate": 8.16e-05, "loss": 6.9759, "step": 415},
    {"epoch": 0.23, "learning_rate": 8.18e-05, "loss": 6.9522, "step": 416},
    {"epoch": 0.23, "learning_rate": 8.2e-05, "loss": 6.9069, "step": 417},
    {"epoch": 0.23, "learning_rate": 8.22e-05, "loss": 6.8897, "step": 418},
    {"epoch": 0.23, "learning_rate": 8.24e-05, "loss": 6.8895, "step": 419},
    {"epoch": 0.24, "learning_rate": 8.26e-05, "loss": 7.0399, "step": 420},
    {"epoch": 0.24, "learning_rate": 8.28e-05, "loss": 7.0276, "step": 421},
    {"epoch": 0.24, "learning_rate": 8.3e-05, "loss": 6.9433, "step": 422},
    {"epoch": 0.24, "learning_rate": 8.32e-05, "loss": 6.8636, "step": 423},
    {"epoch": 0.24, "learning_rate": 8.34e-05, "loss": 7.1395, "step": 424},
    {"epoch": 0.24, "learning_rate": 8.36e-05, "loss": 7.1488, "step": 425},
    {"epoch": 0.24, "learning_rate": 8.38e-05, "loss": 6.9623, "step": 426},
    {"epoch": 0.24, "learning_rate": 8.4e-05, "loss": 6.8618, "step": 427},
    {"epoch": 0.24, "learning_rate": 8.42e-05, "loss": 6.9247, "step": 428},
    {"epoch": 0.24, "learning_rate": 8.44e-05, "loss": 6.9291, "step": 429},
    {"epoch": 0.24, "learning_rate": 8.46e-05, "loss": 6.9002, "step": 430},
    {"epoch": 0.24, "learning_rate": 8.48e-05, "loss": 6.8373, "step": 431},
    {"epoch": 0.24, "learning_rate": 8.5e-05, "loss": 6.9988, "step": 432},
    {"epoch": 0.24, "learning_rate": 8.52e-05, "loss": 6.8496, "step": 433},
    {"epoch": 0.24, "learning_rate": 8.54e-05, "loss": 7.0739, "step": 434},
    {"epoch": 0.24, "learning_rate": 8.560000000000001e-05, "loss": 6.9117, "step": 435},
    {"epoch": 0.24, "learning_rate": 8.58e-05, "loss": 6.8533, "step": 436},
    {"epoch": 0.24, "learning_rate": 8.6e-05, "loss": 6.8884, "step": 437},
    {"epoch": 0.25, "learning_rate": 8.620000000000001e-05, "loss": 7.0006, "step": 438},
    {"epoch": 0.25, "learning_rate": 8.64e-05, "loss": 6.7802, "step": 439},
    {"epoch": 0.25, "learning_rate": 8.66e-05, "loss": 6.9682, "step": 440},
    {"epoch": 0.25, "learning_rate": 8.680000000000001e-05, "loss": 6.8996, "step": 441},
    {"epoch": 0.25, "learning_rate": 8.7e-05, "loss": 6.8972, "step": 442},
    {"epoch": 0.25, "learning_rate": 8.72e-05, "loss": 6.6445, "step": 443},
    {"epoch": 0.25, "learning_rate": 8.740000000000001e-05, "loss": 6.7829, "step": 444},
    {"epoch": 0.25, "learning_rate": 8.76e-05, "loss": 6.7631, "step": 445},
    {"epoch": 0.25, "learning_rate": 8.78e-05, "loss": 7.1588, "step": 446},
    {"epoch": 0.25, "learning_rate": 8.800000000000001e-05, "loss": 6.8277, "step": 447},
    {"epoch": 0.25, "learning_rate": 8.82e-05, "loss": 6.8145, "step": 448},
    {"epoch": 0.25, "learning_rate": 8.840000000000001e-05, "loss": 7.0153, "step": 449},
    {"epoch": 0.25, "learning_rate": 8.86e-05, "loss": 6.4082, "step": 450},
    {"epoch": 0.25, "learning_rate": 8.88e-05, "loss": 6.8712, "step": 451},
    {"epoch": 0.25, "learning_rate": 8.900000000000001e-05, "loss": 6.9324, "step": 452},
    {"epoch": 0.25, "learning_rate": 8.92e-05, "loss": 7.0749, "step": 453},
    {"epoch": 0.25, "learning_rate": 8.94e-05, "loss": 6.9352, "step": 454},
    {"epoch": 0.26, "learning_rate": 8.960000000000001e-05, "loss": 6.8809, "step": 455},
    {"epoch": 0.26, "learning_rate": 8.98e-05, "loss": 7.0244, "step": 456},
    {"epoch": 0.26, "learning_rate": 9e-05, "loss": 6.7082, "step": 457},
    {"epoch": 0.26, "learning_rate": 9.020000000000001e-05, "loss": 6.9073, "step": 458},
    {"epoch": 0.26, "learning_rate": 9.04e-05, "loss": 6.7812, "step": 459},
    {"epoch": 0.26, "learning_rate": 9.06e-05, "loss": 6.9702, "step": 460},
    {"epoch": 0.26, "learning_rate": 9.080000000000001e-05, "loss": 6.755, "step": 461},
    {"epoch": 0.26, "learning_rate": 9.1e-05, "loss": 6.7184, "step": 462},
    {"epoch": 0.26, "learning_rate": 9.120000000000001e-05, "loss": 6.8317, "step": 463},
    {"epoch": 0.26, "learning_rate": 9.140000000000001e-05, "loss": 6.9396, "step": 464},
    {"epoch": 0.26, "learning_rate": 9.16e-05, "loss": 6.7579, "step": 465},
    {"epoch": 0.26, "learning_rate": 9.180000000000001e-05, "loss": 7.0377, "step": 466},
    {"epoch": 0.26, "learning_rate": 9.200000000000001e-05, "loss": 6.8726, "step": 467},
    {"epoch": 0.26, "learning_rate": 9.22e-05, "loss": 7.0358, "step": 468},
    {"epoch": 0.26, "learning_rate": 9.240000000000001e-05, "loss": 6.661, "step": 469},
    {"epoch": 0.26, "learning_rate": 9.260000000000001e-05, "loss": 6.9805, "step": 470},
    {"epoch": 0.26, "learning_rate": 9.28e-05, "loss": 6.8981, "step": 471},
    {"epoch": 0.26, "learning_rate": 9.300000000000001e-05, "loss": 6.733, "step": 472},
    {"epoch": 0.27, "learning_rate": 9.320000000000002e-05, "loss": 6.7853, "step": 473},
    {"epoch": 0.27, "learning_rate": 9.340000000000001e-05, "loss": 6.8312, "step": 474},
    {"epoch": 0.27, "learning_rate": 9.360000000000001e-05, "loss": 6.8538, "step": 475},
    {"epoch": 0.27, "learning_rate": 9.38e-05, "loss": 6.8642, "step": 476},
    {"epoch": 0.27, "learning_rate": 9.4e-05, "loss": 6.8786, "step": 477},
    {"epoch": 0.27, "learning_rate": 9.42e-05, "loss": 6.8739, "step": 478},
    {"epoch": 0.27, "learning_rate": 9.44e-05, "loss": 6.942, "step": 479},
    {"epoch": 0.27, "learning_rate": 9.46e-05, "loss": 6.9337, "step": 480},
    {"epoch": 0.27, "learning_rate": 9.48e-05, "loss": 7.0572, "step": 481},
    {"epoch": 0.27, "learning_rate": 9.5e-05, "loss": 6.93, "step": 482},
    {"epoch": 0.27, "learning_rate": 9.52e-05, "loss": 7.2985, "step": 483},
    {"epoch": 0.27, "learning_rate": 9.54e-05, "loss": 6.8695, "step": 484},
    {"epoch": 0.27, "learning_rate": 9.56e-05, "loss": 6.9664, "step": 485},
    {"epoch": 0.27, "learning_rate": 9.58e-05, "loss": 6.9719, "step": 486},
    {"epoch": 0.27, "learning_rate": 9.6e-05, "loss": 6.817, "step": 487},
    {"epoch": 0.27, "learning_rate": 9.620000000000001e-05, "loss": 7.0931, "step": 488},
    {"epoch": 0.27, "learning_rate": 9.64e-05, "loss": 6.8813, "step": 489},
    {"epoch": 0.27, "learning_rate": 9.66e-05, "loss": 6.8692, "step": 490},
    {"epoch": 0.28, "learning_rate": 9.680000000000001e-05, "loss": 6.6822, "step": 491},
    {"epoch": 0.28, "learning_rate": 9.7e-05, "loss": 7.0572, "step": 492},
    {"epoch": 0.28, "learning_rate": 9.72e-05, "loss": 6.9379, "step": 493},
    {"epoch": 0.28, "learning_rate": 9.74e-05, "loss": 7.0503, "step": 494},
    {"epoch": 0.28, "learning_rate": 9.76e-05, "loss": 6.8366, "step": 495},
    {"epoch": 0.28, "learning_rate": 9.78e-05, "loss": 6.9263, "step": 496},
    {"epoch": 0.28, "learning_rate": 9.8e-05, "loss": 6.6806, "step": 497},
    {"epoch": 0.28, "learning_rate": 9.82e-05, "loss": 7.0852, "step": 498},
    {"epoch": 0.28, "learning_rate": 9.84e-05, "loss": 6.6118, "step": 499},
    {"epoch": 0.28, "learning_rate": 9.86e-05, "loss": 6.6509, "step": 500},
    {"epoch": 0.28, "eval_loss": 6.8575053215026855, "eval_runtime": 840.1793, "eval_samples_per_second": 3.145, "eval_steps_per_second": 0.394, "eval_wer": 1.3255550925650337, "step": 500},
    {"epoch": 0.28, "learning_rate": 9.88e-05, "loss": 6.9197, "step": 501},
    {"epoch": 0.28, "learning_rate": 9.900000000000001e-05, "loss": 6.9804, "step": 502},
    {"epoch": 0.28, "learning_rate": 9.92e-05, "loss": 6.8762, "step": 503},
    {"epoch": 0.28, "learning_rate": 9.94e-05, "loss": 7.0246, "step": 504},
    {"epoch": 0.28, "learning_rate": 9.960000000000001e-05, "loss": 6.7874, "step": 505},
    {"epoch": 0.28, "learning_rate": 9.98e-05, "loss": 7.0085, "step": 506},
    {"epoch": 0.28, "learning_rate": 0.0001, "loss": 6.7434, "step": 507},
    {"epoch": 0.28, "learning_rate": 9.992211838006231e-05, "loss": 7.0536, "step": 508},
    {"epoch": 0.29, "learning_rate": 9.984423676012462e-05, "loss": 7.0318, "step": 509},
    {"epoch": 0.29, "learning_rate": 9.976635514018692e-05, "loss": 6.9761, "step": 510},
    {"epoch": 0.29, "learning_rate": 9.968847352024923e-05, "loss": 6.9125, "step": 511},
    {"epoch": 0.29, "learning_rate": 9.961059190031154e-05, "loss": 6.826, "step": 512},
    {"epoch": 0.29, "learning_rate": 9.953271028037385e-05, "loss": 6.8411, "step": 513},
    {"epoch": 0.29, "learning_rate": 9.945482866043614e-05, "loss": 6.7741, "step": 514},
    {"epoch": 0.29, "learning_rate": 9.937694704049845e-05, "loss": 6.9531, "step": 515},
    {"epoch": 0.29, "learning_rate": 9.929906542056076e-05, "loss": 6.9797, "step": 516},
    {"epoch": 0.29, "learning_rate": 9.922118380062307e-05, "loss": 6.9851, "step": 517},
    {"epoch": 0.29, "learning_rate": 9.914330218068536e-05, "loss": 6.9887, "step": 518},
    {"epoch": 0.29, "learning_rate": 9.906542056074767e-05, "loss": 6.8929, "step": 519},
    {"epoch": 0.29, "learning_rate": 9.898753894080998e-05, "loss": 6.9011, "step": 520},
    {"epoch": 0.29, "learning_rate": 9.890965732087229e-05, "loss": 6.8161, "step": 521},
    {"epoch": 0.29, "learning_rate": 9.883177570093459e-05, "loss": 6.9713, "step": 522},
    {"epoch": 0.29, "learning_rate": 9.87538940809969e-05, "loss": 6.7303, "step": 523},
    {"epoch": 0.29, "learning_rate": 9.867601246105919e-05, "loss": 6.9274, "step": 524},
    {"epoch": 0.29, "learning_rate": 9.85981308411215e-05, "loss": 6.9497, "step": 525},
    {"epoch": 0.29, "learning_rate": 9.852024922118381e-05, "loss": 6.8621, "step": 526},
    {"epoch": 0.3, "learning_rate": 9.84423676012461e-05, "loss": 7.1177, "step": 527},
    {"epoch": 0.3, "learning_rate": 9.836448598130841e-05, "loss": 6.7499, "step": 528},
    {"epoch": 0.3, "learning_rate": 9.828660436137072e-05, "loss": 6.9449, "step": 529},
    {"epoch": 0.3, "learning_rate": 9.820872274143302e-05, "loss": 7.0588, "step": 530},
    {"epoch": 0.3, "learning_rate": 9.813084112149533e-05, "loss": 6.7286, "step": 531},
    {"epoch": 0.3, "learning_rate": 9.805295950155764e-05, "loss": 6.7613, "step": 532},
    {"epoch": 0.3, "learning_rate": 9.797507788161995e-05, "loss": 6.9184, "step": 533},
    {"epoch": 0.3, "learning_rate": 9.789719626168224e-05, "loss": 6.7438, "step": 534},
    {"epoch": 0.3, "learning_rate": 9.781931464174455e-05, "loss": 7.1036, "step": 535},
    {"epoch": 0.3, "learning_rate": 9.774143302180686e-05, "loss": 6.8978, "step": 536},
    {"epoch": 0.3, "learning_rate": 9.766355140186917e-05, "loss": 6.7852, "step": 537},
    {"epoch": 0.3, "learning_rate": 9.758566978193146e-05, "loss": 6.7405, "step": 538},
    {"epoch": 0.3, "learning_rate": 9.750778816199377e-05, "loss": 6.969, "step": 539},
    {"epoch": 0.3, "learning_rate": 9.742990654205608e-05, "loss": 7.1081, "step": 540},
    {"epoch": 0.3, "learning_rate": 9.735202492211839e-05, "loss": 6.7609, "step": 541},
    {"epoch": 0.3, "learning_rate": 9.727414330218069e-05, "loss": 7.0737, "step": 542},
    {"epoch": 0.3, "learning_rate": 9.7196261682243e-05, "loss": 6.7939, "step": 543},
    {"epoch": 0.3, "learning_rate": 9.71183800623053e-05, "loss": 6.7633, "step": 544},
    {"epoch": 0.31, "learning_rate": 9.704049844236761e-05, "loss": 6.6068, "step": 545},
    {"epoch": 0.31, "learning_rate": 9.696261682242991e-05, "loss": 6.526, "step": 546},
    {"epoch": 0.31, "learning_rate": 9.688473520249222e-05, "loss": 6.9976, "step": 547},
    {"epoch": 0.31, "learning_rate": 9.680685358255453e-05, "loss": 6.5767, "step": 548},
    {"epoch": 0.31, "learning_rate": 9.672897196261684e-05, "loss": 6.3953, "step": 549},
    {"epoch": 0.31, "learning_rate": 9.665109034267913e-05, "loss": 6.1639, "step": 550},
    {"epoch": 0.31, "learning_rate": 9.657320872274144e-05, "loss": 7.1843, "step": 551},
    {"epoch": 0.31, "learning_rate": 9.649532710280375e-05, "loss": 7.2188, "step": 552},
    {"epoch": 0.31, "learning_rate": 9.641744548286606e-05, "loss": 6.9175, "step": 553},
    {"epoch": 0.31, "learning_rate": 9.633956386292835e-05, "loss": 6.8885, "step": 554},
    {"epoch": 0.31, "learning_rate": 9.626168224299066e-05, "loss": 6.9077, "step": 555},
    {"epoch": 0.31, "learning_rate": 9.618380062305297e-05, "loss": 6.993, "step": 556},
    {"epoch": 0.31, "learning_rate": 9.610591900311527e-05, "loss": 7.0176, "step": 557},
    {"epoch": 0.31, "learning_rate": 9.602803738317758e-05, "loss": 6.9696, "step": 558},
    {"epoch": 0.31, "learning_rate": 9.595015576323989e-05, "loss": 6.7144, "step": 559},
    {"epoch": 0.31, "learning_rate": 9.587227414330218e-05, "loss": 6.9005, "step": 560},
    {"epoch": 0.31, "learning_rate": 9.579439252336449e-05, "loss": 6.8746, "step": 561},
    {"epoch": 0.32, "learning_rate": 9.571651090342679e-05, "loss": 6.8284, "step": 562},
    {"epoch": 0.32, "learning_rate": 9.56386292834891e-05, "loss": 6.9093, "step": 563},
    {"epoch": 0.32, "learning_rate": 9.55607476635514e-05, "loss": 7.1227, "step": 564},
    {"epoch": 0.32, "learning_rate": 9.548286604361371e-05, "loss": 6.9711, "step": 565},
    {"epoch": 0.32, "learning_rate": 9.540498442367601e-05, "loss": 6.8319, "step": 566},
    {"epoch": 0.32, "learning_rate": 9.532710280373832e-05, "loss": 7.0507, "step": 567},
    {"epoch": 0.32, "learning_rate": 9.524922118380063e-05, "loss": 6.9584, "step": 568},
    {"epoch": 0.32, "learning_rate": 9.517133956386294e-05, "loss": 6.9772, "step": 569},
    {"epoch": 0.32, "learning_rate": 9.509345794392523e-05, "loss": 6.9196, "step": 570},
    {"epoch": 0.32, "learning_rate": 9.501557632398754e-05, "loss": 6.852, "step": 571},
    {"epoch": 0.32, "learning_rate": 9.493769470404985e-05, "loss": 6.8383, "step": 572},
    {"epoch": 0.32, "learning_rate": 9.485981308411216e-05, "loss": 6.8018, "step": 573},
    {"epoch": 0.32, "learning_rate": 9.478193146417445e-05, "loss": 6.7024, "step": 574},
    {"epoch": 0.32, "learning_rate": 9.470404984423676e-05,
|
"loss": 6.8309, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.462616822429907e-05, |
|
"loss": 6.9125, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.454828660436138e-05, |
|
"loss": 6.9315, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.447040498442368e-05, |
|
"loss": 6.7255, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.439252336448599e-05, |
|
"loss": 7.0144, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.43146417445483e-05, |
|
"loss": 7.0721, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.42367601246106e-05, |
|
"loss": 6.8903, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.41588785046729e-05, |
|
"loss": 6.8397, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.408099688473521e-05, |
|
"loss": 6.7447, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.400311526479752e-05, |
|
"loss": 7.039, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.392523364485983e-05, |
|
"loss": 6.9449, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.384735202492212e-05, |
|
"loss": 6.7055, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.376947040498443e-05, |
|
"loss": 6.958, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.369158878504674e-05, |
|
"loss": 6.8422, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.361370716510905e-05, |
|
"loss": 7.0926, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.353582554517135e-05, |
|
"loss": 7.1801, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.345794392523365e-05, |
|
"loss": 6.9282, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.338006230529596e-05, |
|
"loss": 6.9216, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.330218068535826e-05, |
|
"loss": 6.5671, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.322429906542057e-05, |
|
"loss": 6.8452, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.314641744548286e-05, |
|
"loss": 6.7067, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.306853582554517e-05, |
|
"loss": 6.5783, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 9.299065420560748e-05, |
|
"loss": 6.5996, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.291277258566978e-05, |
|
"loss": 6.631, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.283489096573209e-05, |
|
"loss": 6.6143, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.27570093457944e-05, |
|
"loss": 6.5022, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.26791277258567e-05, |
|
"loss": 6.8367, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.2601246105919e-05, |
|
"loss": 6.9814, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.252336448598131e-05, |
|
"loss": 6.7794, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.244548286604362e-05, |
|
"loss": 6.926, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.236760124610593e-05, |
|
"loss": 6.6703, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.228971962616822e-05, |
|
"loss": 6.8657, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.221183800623053e-05, |
|
"loss": 7.1034, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.213395638629284e-05, |
|
"loss": 6.7906, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.205607476635515e-05, |
|
"loss": 6.9375, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.197819314641744e-05, |
|
"loss": 6.7964, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.190031152647975e-05, |
|
"loss": 7.1234, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.182242990654206e-05, |
|
"loss": 6.8837, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.174454828660437e-05, |
|
"loss": 6.9265, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.166666666666667e-05, |
|
"loss": 6.9301, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.158878504672898e-05, |
|
"loss": 6.8605, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.151090342679129e-05, |
|
"loss": 6.8773, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.14330218068536e-05, |
|
"loss": 6.9054, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.135514018691589e-05, |
|
"loss": 6.649, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.12772585669782e-05, |
|
"loss": 6.9492, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.119937694704051e-05, |
|
"loss": 6.8695, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.112149532710282e-05, |
|
"loss": 6.8915, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.104361370716511e-05, |
|
"loss": 6.8622, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.096573208722742e-05, |
|
"loss": 6.8106, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.088785046728973e-05, |
|
"loss": 7.1176, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.080996884735204e-05, |
|
"loss": 6.9511, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.073208722741434e-05, |
|
"loss": 6.9133, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.065420560747664e-05, |
|
"loss": 6.7806, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.057632398753894e-05, |
|
"loss": 7.0308, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.049844236760125e-05, |
|
"loss": 7.0151, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.042056074766356e-05, |
|
"loss": 6.8086, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.034267912772585e-05, |
|
"loss": 6.8521, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.026479750778816e-05, |
|
"loss": 6.7231, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 9.018691588785047e-05, |
|
"loss": 6.7798, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.010903426791277e-05, |
|
"loss": 6.9465, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.003115264797508e-05, |
|
"loss": 6.8557, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.995327102803739e-05, |
|
"loss": 6.7445, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.98753894080997e-05, |
|
"loss": 6.7408, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.979750778816199e-05, |
|
"loss": 6.6449, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.97196261682243e-05, |
|
"loss": 6.9958, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.964174454828661e-05, |
|
"loss": 6.9706, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.956386292834892e-05, |
|
"loss": 6.7737, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.948598130841121e-05, |
|
"loss": 6.8188, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.940809968847352e-05, |
|
"loss": 6.717, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.933021806853583e-05, |
|
"loss": 6.7582, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.925233644859814e-05, |
|
"loss": 6.3916, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.917445482866044e-05, |
|
"loss": 6.6126, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.909657320872274e-05, |
|
"loss": 6.6796, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.901869158878505e-05, |
|
"loss": 6.7804, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.894080996884736e-05, |
|
"loss": 6.8676, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.886292834890966e-05, |
|
"loss": 6.382, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 8.878504672897197e-05, |
|
"loss": 6.9376, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.870716510903428e-05, |
|
"loss": 6.8211, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.862928348909659e-05, |
|
"loss": 6.9026, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.855140186915888e-05, |
|
"loss": 6.8975, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.847352024922119e-05, |
|
"loss": 6.8237, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.83956386292835e-05, |
|
"loss": 6.7636, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.831775700934581e-05, |
|
"loss": 6.7447, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.82398753894081e-05, |
|
"loss": 6.9774, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.816199376947041e-05, |
|
"loss": 6.8717, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.808411214953272e-05, |
|
"loss": 6.9049, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.800623052959502e-05, |
|
"loss": 6.9356, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.792834890965733e-05, |
|
"loss": 6.7955, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.785046728971964e-05, |
|
"loss": 6.8461, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.777258566978193e-05, |
|
"loss": 6.9118, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.769470404984424e-05, |
|
"loss": 6.8114, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.761682242990654e-05, |
|
"loss": 6.8603, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.753894080996884e-05, |
|
"loss": 6.9247, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 8.746105919003115e-05, |
|
"loss": 6.7681, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.738317757009346e-05, |
|
"loss": 6.8088, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.730529595015576e-05, |
|
"loss": 6.7462, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.722741433021807e-05, |
|
"loss": 7.0845, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.714953271028038e-05, |
|
"loss": 6.8837, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.707165109034269e-05, |
|
"loss": 6.778, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.699376947040498e-05, |
|
"loss": 6.758, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.691588785046729e-05, |
|
"loss": 6.8478, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.68380062305296e-05, |
|
"loss": 6.8756, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.676012461059191e-05, |
|
"loss": 6.6917, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.66822429906542e-05, |
|
"loss": 6.8888, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.660436137071651e-05, |
|
"loss": 6.8382, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.652647975077882e-05, |
|
"loss": 6.9166, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.644859813084113e-05, |
|
"loss": 7.0303, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.637071651090343e-05, |
|
"loss": 6.8108, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.629283489096574e-05, |
|
"loss": 6.7811, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.621495327102804e-05, |
|
"loss": 6.8667, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.613707165109035e-05, |
|
"loss": 7.0215, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 8.605919003115265e-05, |
|
"loss": 6.895, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.598130841121496e-05, |
|
"loss": 6.7047, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.590342679127727e-05, |
|
"loss": 6.8253, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.582554517133958e-05, |
|
"loss": 7.0848, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.574766355140187e-05, |
|
"loss": 6.7936, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.566978193146418e-05, |
|
"loss": 6.7671, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.559190031152649e-05, |
|
"loss": 6.9499, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.55140186915888e-05, |
|
"loss": 6.8311, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.54361370716511e-05, |
|
"loss": 6.7495, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.53582554517134e-05, |
|
"loss": 6.8299, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.528037383177571e-05, |
|
"loss": 6.922, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.520249221183801e-05, |
|
"loss": 6.7502, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.512461059190032e-05, |
|
"loss": 6.5571, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.504672897196261e-05, |
|
"loss": 6.5687, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.496884735202492e-05, |
|
"loss": 6.5734, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.489096573208723e-05, |
|
"loss": 6.79, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.481308411214953e-05, |
|
"loss": 6.8292, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.473520249221184e-05, |
|
"loss": 6.7094, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 8.465732087227414e-05, |
|
"loss": 7.0788, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.457943925233645e-05, |
|
"loss": 6.7638, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.450155763239875e-05, |
|
"loss": 7.0279, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.442367601246106e-05, |
|
"loss": 6.8799, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.434579439252337e-05, |
|
"loss": 7.0409, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.426791277258568e-05, |
|
"loss": 6.7372, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.419003115264797e-05, |
|
"loss": 6.903, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.411214953271028e-05, |
|
"loss": 6.8234, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.403426791277259e-05, |
|
"loss": 6.8567, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.39563862928349e-05, |
|
"loss": 6.7834, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.38785046728972e-05, |
|
"loss": 7.0614, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.38006230529595e-05, |
|
"loss": 6.8783, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.372274143302181e-05, |
|
"loss": 6.7742, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.364485981308412e-05, |
|
"loss": 6.8021, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.356697819314642e-05, |
|
"loss": 6.9017, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.348909657320873e-05, |
|
"loss": 6.6549, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.341121495327104e-05, |
|
"loss": 6.893, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 6.9332, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 8.325545171339564e-05, |
|
"loss": 7.0721, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.317757009345795e-05, |
|
"loss": 6.9832, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.309968847352026e-05, |
|
"loss": 6.8941, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.302180685358257e-05, |
|
"loss": 6.8725, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.294392523364486e-05, |
|
"loss": 6.7042, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.286604361370717e-05, |
|
"loss": 6.52, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.278816199376948e-05, |
|
"loss": 6.8261, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.271028037383179e-05, |
|
"loss": 6.7406, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.263239875389408e-05, |
|
"loss": 6.9421, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.25545171339564e-05, |
|
"loss": 6.8402, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.24766355140187e-05, |
|
"loss": 6.6862, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.2398753894081e-05, |
|
"loss": 6.9255, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.232087227414331e-05, |
|
"loss": 6.9109, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.22429906542056e-05, |
|
"loss": 6.8192, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.216510903426791e-05, |
|
"loss": 6.805, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.208722741433022e-05, |
|
"loss": 6.7994, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.200934579439252e-05, |
|
"loss": 6.812, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.193146417445483e-05, |
|
"loss": 6.8544, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 8.185358255451713e-05, |
|
"loss": 6.8513, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.177570093457944e-05, |
|
"loss": 6.9947, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.169781931464174e-05, |
|
"loss": 6.9098, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.161993769470405e-05, |
|
"loss": 6.8769, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.154205607476636e-05, |
|
"loss": 6.8678, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.146417445482867e-05, |
|
"loss": 6.8643, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.138629283489096e-05, |
|
"loss": 6.5046, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.130841121495327e-05, |
|
"loss": 6.6708, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.123052959501558e-05, |
|
"loss": 6.582, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.115264797507789e-05, |
|
"loss": 6.4845, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.107476635514018e-05, |
|
"loss": 6.3926, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.09968847352025e-05, |
|
"loss": 6.9281, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.09190031152648e-05, |
|
"loss": 6.8161, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.084112149532711e-05, |
|
"loss": 6.8117, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.076323987538941e-05, |
|
"loss": 6.8274, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.068535825545172e-05, |
|
"loss": 6.7444, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.060747663551403e-05, |
|
"loss": 6.907, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.052959501557633e-05, |
|
"loss": 6.9038, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 8.045171339563863e-05, |
|
"loss": 6.9628, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.037383177570094e-05, |
|
"loss": 6.5955, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.029595015576325e-05, |
|
"loss": 6.7418, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.021806853582556e-05, |
|
"loss": 6.7781, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.014018691588785e-05, |
|
"loss": 6.7946, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 8.006230529595016e-05, |
|
"loss": 6.953, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.998442367601247e-05, |
|
"loss": 6.7024, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.990654205607478e-05, |
|
"loss": 6.7587, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.982866043613708e-05, |
|
"loss": 6.6026, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.975077881619938e-05, |
|
"loss": 6.9175, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.967289719626168e-05, |
|
"loss": 6.7247, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.959501557632399e-05, |
|
"loss": 6.9453, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.95171339563863e-05, |
|
"loss": 6.746, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.94392523364486e-05, |
|
"loss": 6.861, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.93613707165109e-05, |
|
"loss": 6.8337, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.928348909657321e-05, |
|
"loss": 6.7626, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.920560747663551e-05, |
|
"loss": 6.9008, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.912772585669782e-05, |
|
"loss": 6.7699, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 7.904984423676013e-05, |
|
"loss": 6.8367, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.897196261682243e-05, |
|
"loss": 6.7613, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.889408099688473e-05, |
|
"loss": 6.9011, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.881619937694704e-05, |
|
"loss": 6.8854, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.873831775700935e-05, |
|
"loss": 6.7591, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.866043613707166e-05, |
|
"loss": 6.9075, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.858255451713395e-05, |
|
"loss": 6.7958, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.850467289719626e-05, |
|
"loss": 6.8261, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.842679127725857e-05, |
|
"loss": 6.9228, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.834890965732088e-05, |
|
"loss": 6.713, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.827102803738318e-05, |
|
"loss": 6.7864, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.819314641744548e-05, |
|
"loss": 6.6038, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.81152647975078e-05, |
|
"loss": 6.985, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.80373831775701e-05, |
|
"loss": 6.7895, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.79595015576324e-05, |
|
"loss": 6.9987, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.788161993769471e-05, |
|
"loss": 6.777, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.780373831775702e-05, |
|
"loss": 6.5008, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 7.772585669781933e-05, |
|
"loss": 6.8303, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.764797507788162e-05, |
|
"loss": 6.824, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.757009345794393e-05, |
|
"loss": 6.726, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.749221183800624e-05, |
|
"loss": 6.7346, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.741433021806855e-05, |
|
"loss": 6.3834, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.733644859813084e-05, |
|
"loss": 6.6326, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.725856697819315e-05, |
|
"loss": 6.5849, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.718068535825546e-05, |
|
"loss": 6.4028, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.710280373831776e-05, |
|
"loss": 7.047, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.702492211838007e-05, |
|
"loss": 6.9589, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.694704049844238e-05, |
|
"loss": 6.975, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.686915887850467e-05, |
|
"loss": 6.8137, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.679127725856698e-05, |
|
"loss": 7.0075, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.671339563862928e-05, |
|
"loss": 6.921, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.663551401869158e-05, |
|
"loss": 6.7786, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.65576323987539e-05, |
|
"loss": 6.7635, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.64797507788162e-05, |
|
"loss": 6.9325, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.64018691588785e-05, |
|
"loss": 7.0065, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 7.632398753894081e-05, |
|
"loss": 6.9186, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.624610591900312e-05, |
|
"loss": 6.9626, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.616822429906543e-05, |
|
"loss": 6.8821, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.609034267912772e-05, |
|
"loss": 6.7952, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.601246105919003e-05, |
|
"loss": 6.8558, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.593457943925234e-05, |
|
"loss": 6.8522, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.585669781931465e-05, |
|
"loss": 6.8126, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.577881619937694e-05, |
|
"loss": 6.872, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.570093457943925e-05, |
|
"loss": 6.9824, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.562305295950156e-05, |
|
"loss": 6.8069, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.554517133956387e-05, |
|
"loss": 6.9705, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.546728971962617e-05, |
|
"loss": 6.8006, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.538940809968848e-05, |
|
"loss": 7.0144, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.531152647975078e-05, |
|
"loss": 6.8739, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.52336448598131e-05, |
|
"loss": 6.9057, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.515576323987539e-05, |
|
"loss": 6.8216, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.50778816199377e-05, |
|
"loss": 6.8907, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 6.7087, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 7.492211838006232e-05, |
|
"loss": 6.8379, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.484423676012463e-05, |
|
"loss": 6.8193, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.476635514018692e-05, |
|
"loss": 6.9904, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.468847352024923e-05, |
|
"loss": 6.9674, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.461059190031154e-05, |
|
"loss": 6.8443, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.453271028037383e-05, |
|
"loss": 6.5263, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.445482866043614e-05, |
|
"loss": 6.6699, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.437694704049845e-05, |
|
"loss": 6.8055, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.429906542056075e-05, |
|
"loss": 6.8522, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.422118380062306e-05, |
|
"loss": 6.7135, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.414330218068535e-05, |
|
"loss": 6.664, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.406542056074766e-05, |
|
"loss": 6.8019, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.398753894080997e-05, |
|
"loss": 6.9088, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.390965732087228e-05, |
|
"loss": 6.8398, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.383177570093458e-05, |
|
"loss": 6.6816, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.375389408099688e-05, |
|
"loss": 7.0415, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.36760124610592e-05, |
|
"loss": 6.5786, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.35981308411215e-05, |
|
"loss": 6.5448, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 7.35202492211838e-05, |
|
"loss": 6.7314, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.344236760124611e-05, |
|
"loss": 6.7931, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.336448598130842e-05, |
|
"loss": 6.3914, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.328660436137072e-05, |
|
"loss": 6.3418, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.320872274143302e-05, |
|
"loss": 6.8609, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.313084112149533e-05, |
|
"loss": 6.8851, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.305295950155764e-05, |
|
"loss": 6.6982, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.297507788161995e-05, |
|
"loss": 6.8254, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.289719626168224e-05, |
|
"loss": 6.9399, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.281931464174455e-05, |
|
"loss": 6.8329, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.274143302180686e-05, |
|
"loss": 6.8554, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.266355140186917e-05, |
|
"loss": 6.9486, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.258566978193147e-05, |
|
"loss": 6.9051, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.250778816199377e-05, |
|
"loss": 6.9078, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.242990654205608e-05, |
|
"loss": 6.7512, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.235202492211839e-05, |
|
"loss": 6.6674, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.227414330218069e-05, |
|
"loss": 6.6444, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.2196261682243e-05, |
|
"loss": 6.6035, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 7.21183800623053e-05, |
|
"loss": 6.723, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.204049844236762e-05, |
|
"loss": 6.8679, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.196261682242991e-05, |
|
"loss": 6.7285, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.188473520249222e-05, |
|
"loss": 6.7685, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.180685358255453e-05, |
|
"loss": 6.8252, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.172897196261682e-05, |
|
"loss": 6.8381, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.165109034267913e-05, |
|
"loss": 6.8428, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.157320872274143e-05, |
|
"loss": 6.8216, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.149532710280374e-05, |
|
"loss": 6.8508, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.141744548286605e-05, |
|
"loss": 6.8354, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.133956386292834e-05, |
|
"loss": 7.1515, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.126168224299065e-05, |
|
"loss": 6.8919, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.118380062305296e-05, |
|
"loss": 7.0091, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.110591900311527e-05, |
|
"loss": 6.834, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.102803738317757e-05, |
|
"loss": 6.682, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.095015576323987e-05, |
|
"loss": 6.9469, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.087227414330218e-05, |
|
"loss": 6.885, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.079439252336449e-05, |
|
"loss": 6.5958, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 7.071651090342679e-05, |
|
"loss": 6.8998, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.06386292834891e-05, |
|
"loss": 6.8147, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.05607476635514e-05, |
|
"loss": 6.943, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.048286604361372e-05, |
|
"loss": 7.039, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.040498442367601e-05, |
|
"loss": 6.8876, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.032710280373832e-05, |
|
"loss": 6.7416, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.024922118380063e-05, |
|
"loss": 6.7203, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.017133956386294e-05, |
|
"loss": 6.7907, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.009345794392523e-05, |
|
"loss": 6.7941, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 7.001557632398754e-05, |
|
"loss": 6.8814, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.993769470404985e-05, |
|
"loss": 6.8096, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.985981308411216e-05, |
|
"loss": 6.7377, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.978193146417446e-05, |
|
"loss": 6.6372, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.970404984423677e-05, |
|
"loss": 6.6401, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.962616822429907e-05, |
|
"loss": 6.4068, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.954828660436138e-05, |
|
"loss": 6.7655, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.947040498442368e-05, |
|
"loss": 6.1363, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 6.939252336448599e-05, |
|
"loss": 6.3524, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.93146417445483e-05, |
|
"loss": 6.7357, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.92367601246106e-05, |
|
"loss": 6.9655, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.91588785046729e-05, |
|
"loss": 6.9704, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.908099688473521e-05, |
|
"loss": 6.788, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.90031152647975e-05, |
|
"loss": 7.0225, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.892523364485982e-05, |
|
"loss": 7.074, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.884735202492212e-05, |
|
"loss": 6.8425, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.876947040498442e-05, |
|
"loss": 6.7058, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.869158878504673e-05, |
|
"loss": 6.9454, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.861370716510904e-05, |
|
"loss": 6.8068, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.853582554517133e-05, |
|
"loss": 7.0257, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.845794392523364e-05, |
|
"loss": 6.7646, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.838006230529595e-05, |
|
"loss": 6.7171, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.830218068535826e-05, |
|
"loss": 6.9144, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.822429906542056e-05, |
|
"loss": 6.8228, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.814641744548287e-05, |
|
"loss": 6.6986, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.806853582554517e-05, |
|
"loss": 6.7164, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 6.799065420560748e-05, |
|
"loss": 6.915, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.791277258566978e-05, |
|
"loss": 6.8022, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.783489096573209e-05, |
|
"loss": 6.6838, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.77570093457944e-05, |
|
"loss": 6.8513, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.76791277258567e-05, |
|
"loss": 6.5992, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.7601246105919e-05, |
|
"loss": 6.6761, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.752336448598131e-05, |
|
"loss": 6.8706, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.744548286604362e-05, |
|
"loss": 6.8081, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.736760124610593e-05, |
|
"loss": 6.9297, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.728971962616822e-05, |
|
"loss": 6.7386, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.721183800623053e-05, |
|
"loss": 6.7248, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.713395638629284e-05, |
|
"loss": 6.7868, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.705607476635515e-05, |
|
"loss": 6.8418, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.697819314641745e-05, |
|
"loss": 6.8838, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.690031152647976e-05, |
|
"loss": 6.9916, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.682242990654207e-05, |
|
"loss": 6.643, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.674454828660437e-05, |
|
"loss": 6.9304, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 6.7734, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 6.658878504672898e-05, |
|
"loss": 6.8865, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.651090342679129e-05, |
|
"loss": 6.8812, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.64330218068536e-05, |
|
"loss": 6.7532, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.635514018691589e-05, |
|
"loss": 6.5505, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.62772585669782e-05, |
|
"loss": 6.9744, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.61993769470405e-05, |
|
"loss": 6.8858, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.61214953271028e-05, |
|
"loss": 6.7348, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.60436137071651e-05, |
|
"loss": 6.9998, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.596573208722741e-05, |
|
"loss": 6.6454, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.588785046728972e-05, |
|
"loss": 6.9375, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.580996884735203e-05, |
|
"loss": 6.719, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.573208722741432e-05, |
|
"loss": 6.309, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.565420560747663e-05, |
|
"loss": 6.6762, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.557632398753894e-05, |
|
"loss": 6.9591, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.549844236760125e-05, |
|
"loss": 6.0143, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.542056074766355e-05, |
|
"loss": 6.8788, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.534267912772586e-05, |
|
"loss": 6.9095, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.526479750778817e-05, |
|
"loss": 6.9366, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 6.518691588785047e-05, |
|
"loss": 6.7532, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.510903426791277e-05, |
|
"loss": 6.7928, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.503115264797508e-05, |
|
"loss": 6.8576, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.495327102803739e-05, |
|
"loss": 6.8567, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.48753894080997e-05, |
|
"loss": 6.7468, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.479750778816199e-05, |
|
"loss": 6.9872, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.47196261682243e-05, |
|
"loss": 6.7309, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.464174454828661e-05, |
|
"loss": 6.7268, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.456386292834892e-05, |
|
"loss": 6.8677, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.448598130841122e-05, |
|
"loss": 6.775, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.440809968847352e-05, |
|
"loss": 6.7498, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.433021806853583e-05, |
|
"loss": 6.7405, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.425233644859814e-05, |
|
"loss": 6.6689, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.417445482866044e-05, |
|
"loss": 6.8577, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.409657320872275e-05, |
|
"loss": 6.7889, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.401869158878506e-05, |
|
"loss": 6.9624, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.394080996884737e-05, |
|
"loss": 6.6506, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.386292834890966e-05, |
|
"loss": 6.8833, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 6.378504672897197e-05, |
|
"loss": 6.8509, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.370716510903428e-05, |
|
"loss": 6.8744, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.362928348909657e-05, |
|
"loss": 7.0174, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.355140186915888e-05, |
|
"loss": 6.8306, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.347352024922119e-05, |
|
"loss": 6.6668, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.339563862928349e-05, |
|
"loss": 7.106, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.33177570093458e-05, |
|
"loss": 6.8451, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.323987538940809e-05, |
|
"loss": 6.7461, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.31619937694704e-05, |
|
"loss": 6.6756, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.308411214953271e-05, |
|
"loss": 6.504, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.300623052959502e-05, |
|
"loss": 6.6958, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.292834890965732e-05, |
|
"loss": 6.9021, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.285046728971962e-05, |
|
"loss": 6.8218, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.277258566978193e-05, |
|
"loss": 6.6677, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.269470404984424e-05, |
|
"loss": 6.8899, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.261682242990654e-05, |
|
"loss": 6.9077, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.253894080996885e-05, |
|
"loss": 6.7617, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.246105919003116e-05, |
|
"loss": 7.1058, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 6.238317757009346e-05, |
|
"loss": 6.7343, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.230529595015576e-05, |
|
"loss": 6.6018, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.222741433021807e-05, |
|
"loss": 6.958, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.214953271028038e-05, |
|
"loss": 6.6786, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.207165109034269e-05, |
|
"loss": 6.7557, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.199376947040498e-05, |
|
"loss": 6.7065, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.191588785046729e-05, |
|
"loss": 6.3056, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.18380062305296e-05, |
|
"loss": 6.323, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.176012461059191e-05, |
|
"loss": 7.0721, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.16822429906542e-05, |
|
"loss": 6.6362, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.160436137071651e-05, |
|
"loss": 6.4594, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 6.71768856048584, |
|
"eval_runtime": 913.7734, |
|
"eval_samples_per_second": 2.891, |
|
"eval_steps_per_second": 0.362, |
|
"eval_wer": 1.470444669325555, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.152647975077882e-05, |
|
"loss": 7.0822, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.144859813084113e-05, |
|
"loss": 6.8131, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.137071651090343e-05, |
|
"loss": 6.8633, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.129283489096574e-05, |
|
"loss": 6.9075, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.121495327102805e-05, |
|
"loss": 7.0231, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.113707165109036e-05, |
|
"loss": 6.9424, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 6.105919003115265e-05, |
|
"loss": 6.8085, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0981308411214953e-05, |
|
"loss": 6.7931, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.090342679127726e-05, |
|
"loss": 6.7629, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.082554517133957e-05, |
|
"loss": 6.7918, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.074766355140187e-05, |
|
"loss": 6.7224, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0669781931464176e-05, |
|
"loss": 6.9353, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0591900311526485e-05, |
|
"loss": 6.7207, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0514018691588794e-05, |
|
"loss": 6.9447, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.043613707165109e-05, |
|
"loss": 6.831, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.03582554517134e-05, |
|
"loss": 6.6729, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.028037383177571e-05, |
|
"loss": 6.7937, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.020249221183801e-05, |
|
"loss": 6.7578, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.012461059190031e-05, |
|
"loss": 7.0527, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 6.0046728971962615e-05, |
|
"loss": 6.9404, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.9968847352024924e-05, |
|
"loss": 6.8369, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.989096573208723e-05, |
|
"loss": 6.7157, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.981308411214953e-05, |
|
"loss": 6.6451, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.973520249221184e-05, |
|
"loss": 6.7113, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 5.9657320872274146e-05, |
|
"loss": 6.9228, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.9579439252336456e-05, |
|
"loss": 6.7246, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.950155763239875e-05, |
|
"loss": 6.8818, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.942367601246106e-05, |
|
"loss": 6.9532, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.934579439252337e-05, |
|
"loss": 6.7985, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.926791277258568e-05, |
|
"loss": 6.8957, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.9190031152647974e-05, |
|
"loss": 6.7775, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.911214953271028e-05, |
|
"loss": 6.8185, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.903426791277259e-05, |
|
"loss": 6.7675, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.89563862928349e-05, |
|
"loss": 6.6111, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.8878504672897196e-05, |
|
"loss": 6.7393, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.8800623052959505e-05, |
|
"loss": 6.8334, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.872274143302181e-05, |
|
"loss": 6.551, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.864485981308412e-05, |
|
"loss": 6.8451, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.856697819314641e-05, |
|
"loss": 6.8282, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.848909657320872e-05, |
|
"loss": 6.7903, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.841121495327103e-05, |
|
"loss": 6.9499, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.833333333333334e-05, |
|
"loss": 6.6953, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 5.8255451713395635e-05, |
|
"loss": 6.659, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.8177570093457944e-05, |
|
"loss": 6.7088, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.809968847352025e-05, |
|
"loss": 6.85, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.802180685358256e-05, |
|
"loss": 6.7072, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.794392523364486e-05, |
|
"loss": 6.6948, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.786604361370717e-05, |
|
"loss": 6.3604, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7788161993769476e-05, |
|
"loss": 6.2727, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7710280373831785e-05, |
|
"loss": 6.7958, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.763239875389408e-05, |
|
"loss": 7.1558, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.755451713395639e-05, |
|
"loss": 6.757, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.74766355140187e-05, |
|
"loss": 6.778, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7398753894081e-05, |
|
"loss": 6.8464, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.73208722741433e-05, |
|
"loss": 6.8851, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7242990654205605e-05, |
|
"loss": 6.8925, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7165109034267914e-05, |
|
"loss": 6.9586, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.7087227414330223e-05, |
|
"loss": 6.7784, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.700934579439252e-05, |
|
"loss": 6.8225, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.693146417445483e-05, |
|
"loss": 6.8422, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.685358255451714e-05, |
|
"loss": 6.8674, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.6775700934579446e-05, |
|
"loss": 6.8149, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.669781931464174e-05, |
|
"loss": 7.0834, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.661993769470405e-05, |
|
"loss": 6.7002, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.654205607476636e-05, |
|
"loss": 6.9505, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.646417445482867e-05, |
|
"loss": 6.5579, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.6386292834890964e-05, |
|
"loss": 7.0764, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.6308411214953273e-05, |
|
"loss": 6.7833, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.623052959501558e-05, |
|
"loss": 6.8208, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.6152647975077885e-05, |
|
"loss": 6.8433, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.607476635514019e-05, |
|
"loss": 6.7344, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.5996884735202496e-05, |
|
"loss": 6.6265, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.59190031152648e-05, |
|
"loss": 6.8649, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.584112149532711e-05, |
|
"loss": 6.8588, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.57632398753894e-05, |
|
"loss": 6.7167, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.568535825545171e-05, |
|
"loss": 6.8217, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.560747663551402e-05, |
|
"loss": 6.7223, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.552959501557633e-05, |
|
"loss": 6.7281, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 5.5451713395638626e-05, |
|
"loss": 6.9123, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.5373831775700935e-05, |
|
"loss": 6.7693, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.5295950155763244e-05, |
|
"loss": 6.7747, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.521806853582555e-05, |
|
"loss": 6.7762, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.514018691588785e-05, |
|
"loss": 6.7199, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.506230529595016e-05, |
|
"loss": 6.8998, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4984423676012466e-05, |
|
"loss": 6.7387, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4906542056074776e-05, |
|
"loss": 6.9027, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.482866043613707e-05, |
|
"loss": 6.9936, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.475077881619938e-05, |
|
"loss": 6.8707, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.467289719626168e-05, |
|
"loss": 6.9524, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.459501557632399e-05, |
|
"loss": 6.7717, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4517133956386294e-05, |
|
"loss": 6.8699, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4439252336448596e-05, |
|
"loss": 6.9171, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4361370716510905e-05, |
|
"loss": 6.7668, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.4283489096573214e-05, |
|
"loss": 6.491, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.420560747663551e-05, |
|
"loss": 6.6993, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.412772585669782e-05, |
|
"loss": 6.5826, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 5.404984423676013e-05, |
|
"loss": 6.741, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.397196261682244e-05, |
|
"loss": 6.9419, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.389408099688473e-05, |
|
"loss": 6.5356, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.381619937694704e-05, |
|
"loss": 6.1933, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.373831775700935e-05, |
|
"loss": 6.6941, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.366043613707166e-05, |
|
"loss": 6.9154, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.3582554517133955e-05, |
|
"loss": 6.9298, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.3504672897196264e-05, |
|
"loss": 6.7676, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.342679127725857e-05, |
|
"loss": 6.7064, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.3348909657320875e-05, |
|
"loss": 6.6839, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.327102803738318e-05, |
|
"loss": 6.8213, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.319314641744548e-05, |
|
"loss": 6.8041, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.311526479750779e-05, |
|
"loss": 6.8511, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.30373831775701e-05, |
|
"loss": 6.8086, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.2959501557632394e-05, |
|
"loss": 6.7143, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.28816199376947e-05, |
|
"loss": 6.8243, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.280373831775701e-05, |
|
"loss": 6.7176, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.272585669781932e-05, |
|
"loss": 6.8678, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 5.2647975077881616e-05, |
|
"loss": 6.8847, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.2570093457943925e-05, |
|
"loss": 6.5607, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.2492211838006234e-05, |
|
"loss": 6.6599, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.2414330218068543e-05, |
|
"loss": 6.8232, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.233644859813084e-05, |
|
"loss": 6.8677, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.225856697819315e-05, |
|
"loss": 6.7035, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.218068535825546e-05, |
|
"loss": 6.8124, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.210280373831776e-05, |
|
"loss": 6.7539, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.202492211838006e-05, |
|
"loss": 6.6226, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.194704049844237e-05, |
|
"loss": 6.7325, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.186915887850467e-05, |
|
"loss": 6.7674, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.179127725856698e-05, |
|
"loss": 6.6449, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.171339563862928e-05, |
|
"loss": 6.7298, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.163551401869159e-05, |
|
"loss": 7.0989, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.1557632398753896e-05, |
|
"loss": 6.8435, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.1479750778816205e-05, |
|
"loss": 6.9057, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.14018691588785e-05, |
|
"loss": 6.9007, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 5.132398753894081e-05, |
|
"loss": 6.8917, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.124610591900312e-05, |
|
"loss": 6.7459, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.116822429906543e-05, |
|
"loss": 6.7366, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.109034267912772e-05, |
|
"loss": 6.8867, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.101246105919003e-05, |
|
"loss": 6.7771, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.093457943925234e-05, |
|
"loss": 6.7681, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.085669781931465e-05, |
|
"loss": 6.856, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0778816199376946e-05, |
|
"loss": 6.5794, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0700934579439255e-05, |
|
"loss": 6.8176, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.062305295950156e-05, |
|
"loss": 6.7364, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0545171339563866e-05, |
|
"loss": 7.0007, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.046728971962617e-05, |
|
"loss": 6.8698, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.038940809968847e-05, |
|
"loss": 6.8229, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.031152647975078e-05, |
|
"loss": 6.6198, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.023364485981309e-05, |
|
"loss": 6.7486, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.0155763239875384e-05, |
|
"loss": 6.6884, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.007788161993769e-05, |
|
"loss": 6.5174, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5e-05, |
|
"loss": 6.3846, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 4.992211838006231e-05, |
|
"loss": 6.2623, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9844236760124614e-05, |
|
"loss": 6.8907, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.976635514018692e-05, |
|
"loss": 7.0237, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9688473520249225e-05, |
|
"loss": 6.7694, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9610591900311534e-05, |
|
"loss": 6.8883, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9532710280373836e-05, |
|
"loss": 6.9646, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9454828660436145e-05, |
|
"loss": 6.9124, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.937694704049845e-05, |
|
"loss": 6.7865, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.929906542056075e-05, |
|
"loss": 6.9762, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.922118380062305e-05, |
|
"loss": 6.6659, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.914330218068536e-05, |
|
"loss": 6.6963, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.9065420560747664e-05, |
|
"loss": 6.8647, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.898753894080997e-05, |
|
"loss": 6.6314, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8909657320872275e-05, |
|
"loss": 6.7734, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8831775700934584e-05, |
|
"loss": 6.6634, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8753894080996886e-05, |
|
"loss": 6.8744, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.8676012461059195e-05, |
|
"loss": 6.7995, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.85981308411215e-05, |
|
"loss": 6.9712, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 4.852024922118381e-05, |
|
"loss": 6.4927, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.844236760124611e-05, |
|
"loss": 6.7727, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.836448598130842e-05, |
|
"loss": 6.8457, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.828660436137072e-05, |
|
"loss": 6.8111, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.820872274143303e-05, |
|
"loss": 6.7105, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.813084112149533e-05, |
|
"loss": 6.9385, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.8052959501557634e-05, |
|
"loss": 6.6836, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.797507788161994e-05, |
|
"loss": 6.6711, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.7897196261682245e-05, |
|
"loss": 6.7849, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.781931464174455e-05, |
|
"loss": 6.8233, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.774143302180686e-05, |
|
"loss": 6.6739, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.766355140186916e-05, |
|
"loss": 6.6495, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.758566978193147e-05, |
|
"loss": 7.0085, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.750778816199377e-05, |
|
"loss": 6.7702, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.742990654205608e-05, |
|
"loss": 6.6482, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.735202492211838e-05, |
|
"loss": 6.6838, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.727414330218069e-05, |
|
"loss": 6.7068, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.719626168224299e-05, |
|
"loss": 7.1207, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 4.71183800623053e-05, |
|
"loss": 6.7004, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.7040498442367604e-05, |
|
"loss": 6.8511, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6962616822429913e-05, |
|
"loss": 6.8075, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6884735202492216e-05, |
|
"loss": 6.7637, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6806853582554525e-05, |
|
"loss": 6.7424, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.672897196261683e-05, |
|
"loss": 6.7626, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.665109034267913e-05, |
|
"loss": 6.5671, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.657320872274143e-05, |
|
"loss": 6.6137, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.649532710280374e-05, |
|
"loss": 6.6918, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.641744548286604e-05, |
|
"loss": 6.523, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.633956386292835e-05, |
|
"loss": 6.5162, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6261682242990654e-05, |
|
"loss": 6.4098, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6183800623052963e-05, |
|
"loss": 6.5577, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6105919003115266e-05, |
|
"loss": 6.1522, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.6028037383177575e-05, |
|
"loss": 5.863, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.595015576323988e-05, |
|
"loss": 6.8155, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.5872274143302186e-05, |
|
"loss": 6.6422, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.579439252336449e-05, |
|
"loss": 6.8233, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 4.57165109034268e-05, |
|
"loss": 6.6712, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.56386292834891e-05, |
|
"loss": 6.754, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.556074766355141e-05, |
|
"loss": 6.735, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.548286604361371e-05, |
|
"loss": 6.8132, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.540498442367602e-05, |
|
"loss": 6.8136, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.532710280373832e-05, |
|
"loss": 6.7848, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.5249221183800625e-05, |
|
"loss": 6.6918, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.517133956386293e-05, |
|
"loss": 6.8579, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.5093457943925236e-05, |
|
"loss": 6.6428, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.501557632398754e-05, |
|
"loss": 6.7914, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.493769470404985e-05, |
|
"loss": 6.9995, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.485981308411215e-05, |
|
"loss": 6.7258, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.478193146417446e-05, |
|
"loss": 6.6824, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.470404984423676e-05, |
|
"loss": 6.7023, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.462616822429907e-05, |
|
"loss": 6.4243, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.454828660436137e-05, |
|
"loss": 6.6686, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.447040498442368e-05, |
|
"loss": 6.8926, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.4392523364485984e-05, |
|
"loss": 6.8184, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.431464174454829e-05, |
|
"loss": 6.8656, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.4236760124610595e-05, |
|
"loss": 6.9881, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.4158878504672904e-05, |
|
"loss": 6.7487, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.4080996884735206e-05, |
|
"loss": 6.7747, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.400311526479751e-05, |
|
"loss": 6.7369, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.392523364485982e-05, |
|
"loss": 6.7263, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.384735202492212e-05, |
|
"loss": 6.8761, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.376947040498442e-05, |
|
"loss": 6.818, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.369158878504673e-05, |
|
"loss": 7.106, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3613707165109034e-05, |
|
"loss": 6.8751, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.353582554517134e-05, |
|
"loss": 6.5459, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3457943925233645e-05, |
|
"loss": 6.8746, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3380062305295954e-05, |
|
"loss": 6.7276, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3302180685358256e-05, |
|
"loss": 7.0052, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.3224299065420565e-05, |
|
"loss": 6.77, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.314641744548287e-05, |
|
"loss": 6.8492, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.306853582554518e-05, |
|
"loss": 6.5679, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.299065420560748e-05, |
|
"loss": 6.9408, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.291277258566979e-05, |
|
"loss": 6.8556, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.283489096573209e-05, |
|
"loss": 6.7181, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.27570093457944e-05, |
|
"loss": 6.9359, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.26791277258567e-05, |
|
"loss": 6.7889, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2601246105919004e-05, |
|
"loss": 6.5935, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2523364485981306e-05, |
|
"loss": 6.586, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.2445482866043615e-05, |
|
"loss": 6.8654, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.236760124610592e-05, |
|
"loss": 6.7072, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.228971962616823e-05, |
|
"loss": 6.672, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.221183800623053e-05, |
|
"loss": 6.3994, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.213395638629284e-05, |
|
"loss": 6.6606, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.205607476635514e-05, |
|
"loss": 6.9111, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.197819314641745e-05, |
|
"loss": 6.7398, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.190031152647975e-05, |
|
"loss": 6.8289, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.182242990654206e-05, |
|
"loss": 6.8522, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.174454828660436e-05, |
|
"loss": 7.0657, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 6.7021, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.1588785046728974e-05, |
|
"loss": 6.7255, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1510903426791283e-05, |
|
"loss": 6.8391, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1433021806853586e-05, |
|
"loss": 6.786, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.1355140186915895e-05, |
|
"loss": 6.902, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.12772585669782e-05, |
|
"loss": 6.833, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.11993769470405e-05, |
|
"loss": 6.8349, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.11214953271028e-05, |
|
"loss": 6.7596, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.104361370716511e-05, |
|
"loss": 6.687, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.096573208722741e-05, |
|
"loss": 6.8897, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.088785046728972e-05, |
|
"loss": 6.9136, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0809968847352024e-05, |
|
"loss": 6.7326, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.073208722741433e-05, |
|
"loss": 6.8314, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0654205607476636e-05, |
|
"loss": 6.6352, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0576323987538945e-05, |
|
"loss": 6.7198, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.049844236760125e-05, |
|
"loss": 6.6141, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.0420560747663556e-05, |
|
"loss": 6.5693, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.034267912772586e-05, |
|
"loss": 6.892, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.026479750778817e-05, |
|
"loss": 6.6293, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.018691588785047e-05, |
|
"loss": 6.772, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.010903426791278e-05, |
|
"loss": 6.9198, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 4.003115264797508e-05, |
|
"loss": 6.9733, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.995327102803739e-05, |
|
"loss": 6.8218, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.987538940809969e-05, |
|
"loss": 6.8903, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9797507788161995e-05, |
|
"loss": 6.6247, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.97196261682243e-05, |
|
"loss": 6.6749, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.9641744548286606e-05, |
|
"loss": 6.7072, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.956386292834891e-05, |
|
"loss": 6.9025, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.948598130841122e-05, |
|
"loss": 6.6806, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.940809968847352e-05, |
|
"loss": 6.7537, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.933021806853583e-05, |
|
"loss": 6.8352, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.925233644859813e-05, |
|
"loss": 6.807, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.917445482866044e-05, |
|
"loss": 6.6456, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.909657320872274e-05, |
|
"loss": 6.7944, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.901869158878505e-05, |
|
"loss": 6.6344, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8940809968847354e-05, |
|
"loss": 6.7112, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.886292834890966e-05, |
|
"loss": 6.7961, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8785046728971965e-05, |
|
"loss": 6.8133, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8707165109034274e-05, |
|
"loss": 6.5159, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8629283489096576e-05, |
|
"loss": 6.6351, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.855140186915888e-05, |
|
"loss": 6.5139, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.847352024922119e-05, |
|
"loss": 6.3613, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.839563862928349e-05, |
|
"loss": 6.2431, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.831775700934579e-05, |
|
"loss": 6.6968, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.82398753894081e-05, |
|
"loss": 6.1385, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8161993769470404e-05, |
|
"loss": 6.8475, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.808411214953271e-05, |
|
"loss": 6.8944, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.8006230529595015e-05, |
|
"loss": 6.8663, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7928348909657324e-05, |
|
"loss": 6.8245, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7850467289719626e-05, |
|
"loss": 6.7407, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7772585669781935e-05, |
|
"loss": 6.8813, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.769470404984424e-05, |
|
"loss": 6.6473, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.761682242990655e-05, |
|
"loss": 6.8038, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.753894080996885e-05, |
|
"loss": 6.5804, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.746105919003116e-05, |
|
"loss": 6.6848, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.738317757009346e-05, |
|
"loss": 6.8537, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.730529595015577e-05, |
|
"loss": 6.7886, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.722741433021807e-05, |
|
"loss": 6.7024, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7149532710280374e-05, |
|
"loss": 6.6253, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7071651090342676e-05, |
|
"loss": 7.0516, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6993769470404985e-05, |
|
"loss": 6.6521, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.691588785046729e-05, |
|
"loss": 6.5897, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.68380062305296e-05, |
|
"loss": 6.8331, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.67601246105919e-05, |
|
"loss": 6.6247, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.668224299065421e-05, |
|
"loss": 6.7378, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.660436137071651e-05, |
|
"loss": 6.5653, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.652647975077882e-05, |
|
"loss": 6.6614, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.644859813084112e-05, |
|
"loss": 6.827, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.637071651090343e-05, |
|
"loss": 6.9346, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.629283489096573e-05, |
|
"loss": 6.8677, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.621495327102804e-05, |
|
"loss": 6.6434, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.6137071651090344e-05, |
|
"loss": 6.5439, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.605919003115265e-05, |
|
"loss": 6.7886, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.5981308411214956e-05, |
|
"loss": 6.873, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5903426791277265e-05, |
|
"loss": 6.7449, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.582554517133957e-05, |
|
"loss": 6.6457, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.574766355140187e-05, |
|
"loss": 6.6758, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.566978193146417e-05, |
|
"loss": 6.8939, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.559190031152648e-05, |
|
"loss": 6.9075, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.551401869158878e-05, |
|
"loss": 6.5217, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.543613707165109e-05, |
|
"loss": 6.7286, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5358255451713394e-05, |
|
"loss": 6.8534, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.52803738317757e-05, |
|
"loss": 6.7695, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5202492211838006e-05, |
|
"loss": 6.8532, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.5124610591900315e-05, |
|
"loss": 6.7991, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.504672897196262e-05, |
|
"loss": 6.858, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.4968847352024926e-05, |
|
"loss": 6.7159, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.489096573208723e-05, |
|
"loss": 6.7994, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.481308411214954e-05, |
|
"loss": 6.6377, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.473520249221184e-05, |
|
"loss": 6.6774, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.465732087227415e-05, |
|
"loss": 6.7002, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.457943925233645e-05, |
|
"loss": 6.6682, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.450155763239875e-05, |
|
"loss": 6.2381, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.442367601246106e-05, |
|
"loss": 6.4779, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.4345794392523365e-05, |
|
"loss": 5.8608, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.426791277258567e-05, |
|
"loss": 6.9529, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.4190031152647976e-05, |
|
"loss": 6.9269, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.411214953271028e-05, |
|
"loss": 6.813, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.403426791277259e-05, |
|
"loss": 6.6787, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.395638629283489e-05, |
|
"loss": 6.6707, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.38785046728972e-05, |
|
"loss": 6.9152, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.38006230529595e-05, |
|
"loss": 6.6825, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.372274143302181e-05, |
|
"loss": 6.9089, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.364485981308411e-05, |
|
"loss": 6.8308, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.356697819314642e-05, |
|
"loss": 6.8593, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3489096573208724e-05, |
|
"loss": 6.8252, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.341121495327103e-05, |
|
"loss": 6.7709, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 6.7125, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.3255451713395644e-05, |
|
"loss": 6.9884, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.3177570093457946e-05, |
|
"loss": 6.6132, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.309968847352025e-05, |
|
"loss": 6.771, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.302180685358255e-05, |
|
"loss": 6.7291, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.294392523364486e-05, |
|
"loss": 6.7077, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.286604361370716e-05, |
|
"loss": 6.8, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.278816199376947e-05, |
|
"loss": 6.8161, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2710280373831774e-05, |
|
"loss": 6.9352, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.263239875389408e-05, |
|
"loss": 6.5222, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2554517133956385e-05, |
|
"loss": 6.8283, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2476635514018694e-05, |
|
"loss": 6.9066, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2398753894080996e-05, |
|
"loss": 6.7347, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.2320872274143305e-05, |
|
"loss": 6.5675, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.224299065420561e-05, |
|
"loss": 6.8066, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.216510903426792e-05, |
|
"loss": 6.7832, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.208722741433022e-05, |
|
"loss": 6.6992, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.200934579439253e-05, |
|
"loss": 6.8386, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.193146417445483e-05, |
|
"loss": 6.5827, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.185358255451714e-05, |
|
"loss": 6.7807, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.177570093457944e-05, |
|
"loss": 6.7364, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1697819314641744e-05, |
|
"loss": 6.8698, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1619937694704046e-05, |
|
"loss": 6.8047, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1542056074766355e-05, |
|
"loss": 6.8506, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.146417445482866e-05, |
|
"loss": 6.7804, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.1386292834890967e-05, |
|
"loss": 6.6218, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.130841121495327e-05, |
|
"loss": 6.851, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.123052959501558e-05, |
|
"loss": 6.7131, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.115264797507788e-05, |
|
"loss": 6.4938, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.107476635514019e-05, |
|
"loss": 6.7699, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.099688473520249e-05, |
|
"loss": 6.555, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.09190031152648e-05, |
|
"loss": 6.5253, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.08411214953271e-05, |
|
"loss": 6.7741, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.076323987538941e-05, |
|
"loss": 6.7548, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0685358255451714e-05, |
|
"loss": 6.4712, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.060747663551402e-05, |
|
"loss": 6.3719, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.0529595015576326e-05, |
|
"loss": 6.1981, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.045171339563863e-05, |
|
"loss": 6.3146, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0373831775700934e-05, |
|
"loss": 6.8536, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0295950155763243e-05, |
|
"loss": 6.7226, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0218068535825545e-05, |
|
"loss": 6.6683, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0140186915887854e-05, |
|
"loss": 6.6277, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.0062305295950156e-05, |
|
"loss": 6.7321, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9984423676012462e-05, |
|
"loss": 7.0454, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9906542056074764e-05, |
|
"loss": 6.9378, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9828660436137073e-05, |
|
"loss": 6.7896, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9750778816199376e-05, |
|
"loss": 6.6875, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9672897196261685e-05, |
|
"loss": 6.631, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9595015576323987e-05, |
|
"loss": 6.6161, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9517133956386296e-05, |
|
"loss": 6.628, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9439252336448598e-05, |
|
"loss": 6.8107, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9361370716510904e-05, |
|
"loss": 6.6673, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9283489096573206e-05, |
|
"loss": 6.8186, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9205607476635515e-05, |
|
"loss": 6.9819, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9127725856697818e-05, |
|
"loss": 6.9247, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.9049844236760127e-05, |
|
"loss": 6.8138, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.897196261682243e-05, |
|
"loss": 6.718, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8894080996884738e-05, |
|
"loss": 6.6729, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.881619937694704e-05, |
|
"loss": 6.9112, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.873831775700935e-05, |
|
"loss": 6.8493, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.866043613707165e-05, |
|
"loss": 6.6929, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8582554517133957e-05, |
|
"loss": 6.7682, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.850467289719626e-05, |
|
"loss": 6.8625, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.842679127725857e-05, |
|
"loss": 6.8417, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.834890965732087e-05, |
|
"loss": 6.6807, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.827102803738318e-05, |
|
"loss": 6.7824, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8193146417445482e-05, |
|
"loss": 6.8524, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.811526479750779e-05, |
|
"loss": 7.0212, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.8037383177570094e-05, |
|
"loss": 6.844, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.79595015576324e-05, |
|
"loss": 6.8664, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.78816199376947e-05, |
|
"loss": 6.9296, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.780373831775701e-05, |
|
"loss": 6.7549, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7725856697819313e-05, |
|
"loss": 6.9731, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.7647975077881622e-05, |
|
"loss": 6.5857, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7570093457943924e-05, |
|
"loss": 6.9184, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7492211838006233e-05, |
|
"loss": 6.7826, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7414330218068536e-05, |
|
"loss": 6.6698, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.733644859813084e-05, |
|
"loss": 6.703, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7258566978193147e-05, |
|
"loss": 6.8524, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7180685358255453e-05, |
|
"loss": 6.7013, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7102803738317755e-05, |
|
"loss": 6.6943, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.7024922118380064e-05, |
|
"loss": 6.6667, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6947040498442366e-05, |
|
"loss": 6.6399, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6869158878504675e-05, |
|
"loss": 6.8378, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6791277258566978e-05, |
|
"loss": 6.5056, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6713395638629287e-05, |
|
"loss": 6.3927, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.663551401869159e-05, |
|
"loss": 6.5008, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6557632398753895e-05, |
|
"loss": 6.3603, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6479750778816197e-05, |
|
"loss": 6.9112, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6401869158878506e-05, |
|
"loss": 6.8934, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 2.6323987538940808e-05, |
|
"loss": 6.6079, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.6246105919003117e-05, |
|
"loss": 6.6918, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.616822429906542e-05, |
|
"loss": 6.6928, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.609034267912773e-05, |
|
"loss": 6.7824, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.601246105919003e-05, |
|
"loss": 6.8694, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5934579439252337e-05, |
|
"loss": 6.8532, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.585669781931464e-05, |
|
"loss": 6.6958, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5778816199376948e-05, |
|
"loss": 6.7867, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.570093457943925e-05, |
|
"loss": 6.7821, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.562305295950156e-05, |
|
"loss": 6.8194, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.554517133956386e-05, |
|
"loss": 6.6558, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.546728971962617e-05, |
|
"loss": 6.6203, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5389408099688473e-05, |
|
"loss": 6.8418, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.531152647975078e-05, |
|
"loss": 6.7297, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5233644859813084e-05, |
|
"loss": 6.9122, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.515576323987539e-05, |
|
"loss": 6.736, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5077881619937692e-05, |
|
"loss": 6.5482, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.5e-05, |
|
"loss": 6.6138, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 2.4922118380062307e-05, |
|
"loss": 6.7905, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4844236760124613e-05, |
|
"loss": 6.6411, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4766355140186918e-05, |
|
"loss": 6.6297, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4688473520249224e-05, |
|
"loss": 6.5009, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4610591900311526e-05, |
|
"loss": 6.8998, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4532710280373832e-05, |
|
"loss": 6.8103, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4454828660436138e-05, |
|
"loss": 7.0245, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4376947040498443e-05, |
|
"loss": 6.588, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.429906542056075e-05, |
|
"loss": 6.6563, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4221183800623055e-05, |
|
"loss": 6.6741, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.414330218068536e-05, |
|
"loss": 6.8938, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.4065420560747666e-05, |
|
"loss": 6.8526, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.398753894080997e-05, |
|
"loss": 6.7989, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3909657320872274e-05, |
|
"loss": 6.5121, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.383177570093458e-05, |
|
"loss": 6.9102, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3753894080996885e-05, |
|
"loss": 6.8429, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.367601246105919e-05, |
|
"loss": 6.7404, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3598130841121497e-05, |
|
"loss": 6.7011, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 2.3520249221183802e-05, |
|
"loss": 6.9181, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3442367601246108e-05, |
|
"loss": 6.5258, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3364485981308414e-05, |
|
"loss": 6.8041, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3286604361370716e-05, |
|
"loss": 6.861, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.320872274143302e-05, |
|
"loss": 6.5496, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3130841121495327e-05, |
|
"loss": 6.8608, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.3052959501557633e-05, |
|
"loss": 6.6352, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.297507788161994e-05, |
|
"loss": 6.9325, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2897196261682244e-05, |
|
"loss": 6.5802, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.281931464174455e-05, |
|
"loss": 6.591, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2741433021806856e-05, |
|
"loss": 6.2565, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.266355140186916e-05, |
|
"loss": 6.2561, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 6.6749796867370605, |
|
"eval_runtime": 929.946, |
|
"eval_samples_per_second": 2.841, |
|
"eval_steps_per_second": 0.356, |
|
"eval_wer": 1.905907097644701, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2585669781931463e-05, |
|
"loss": 6.6845, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.250778816199377e-05, |
|
"loss": 6.9177, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2429906542056075e-05, |
|
"loss": 6.7967, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.235202492211838e-05, |
|
"loss": 6.7575, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2274143302180686e-05, |
|
"loss": 6.8156, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2196261682242992e-05, |
|
"loss": 6.8512, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.2118380062305298e-05, |
|
"loss": 6.7721, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.2040498442367603e-05, |
|
"loss": 6.7047, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.196261682242991e-05, |
|
"loss": 6.8443, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.188473520249221e-05, |
|
"loss": 6.7234, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1806853582554517e-05, |
|
"loss": 6.6721, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1728971962616822e-05, |
|
"loss": 6.6627, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1651090342679128e-05, |
|
"loss": 6.5215, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1573208722741434e-05, |
|
"loss": 6.6463, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.149532710280374e-05, |
|
"loss": 6.5542, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1417445482866045e-05, |
|
"loss": 6.8001, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.133956386292835e-05, |
|
"loss": 6.6748, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1261682242990653e-05, |
|
"loss": 6.7196, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.118380062305296e-05, |
|
"loss": 6.7024, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.1105919003115264e-05, |
|
"loss": 6.7664, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.102803738317757e-05, |
|
"loss": 6.6765, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0950155763239876e-05, |
|
"loss": 6.8165, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.087227414330218e-05, |
|
"loss": 6.808, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0794392523364487e-05, |
|
"loss": 6.8013, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.0716510903426793e-05, |
|
"loss": 6.6336, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.06386292834891e-05, |
|
"loss": 6.5605, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.05607476635514e-05, |
|
"loss": 6.7468, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0482866043613706e-05, |
|
"loss": 6.6191, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0404984423676012e-05, |
|
"loss": 6.7029, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0327102803738318e-05, |
|
"loss": 6.6654, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0249221183800623e-05, |
|
"loss": 6.629, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.017133956386293e-05, |
|
"loss": 6.4578, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.0093457943925235e-05, |
|
"loss": 6.7215, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.001557632398754e-05, |
|
"loss": 6.6893, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9937694704049846e-05, |
|
"loss": 6.8334, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.985981308411215e-05, |
|
"loss": 6.7196, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9781931464174454e-05, |
|
"loss": 6.6935, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.970404984423676e-05, |
|
"loss": 6.8691, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9626168224299065e-05, |
|
"loss": 6.7143, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.954828660436137e-05, |
|
"loss": 6.7124, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9470404984423677e-05, |
|
"loss": 6.6045, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9392523364485982e-05, |
|
"loss": 6.4852, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.9314641744548288e-05, |
|
"loss": 6.6976, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9236760124610594e-05, |
|
"loss": 6.6177, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9158878504672896e-05, |
|
"loss": 6.7437, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9080996884735202e-05, |
|
"loss": 6.5888, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.9003115264797507e-05, |
|
"loss": 6.6297, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8925233644859813e-05, |
|
"loss": 6.5084, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.884735202492212e-05, |
|
"loss": 6.6385, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8769470404984424e-05, |
|
"loss": 6.1202, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.869158878504673e-05, |
|
"loss": 6.8576, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8613707165109036e-05, |
|
"loss": 6.8767, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8535825545171338e-05, |
|
"loss": 6.6811, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8457943925233644e-05, |
|
"loss": 6.5413, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.838006230529595e-05, |
|
"loss": 6.829, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8302180685358255e-05, |
|
"loss": 6.8513, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.822429906542056e-05, |
|
"loss": 6.6366, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8146417445482866e-05, |
|
"loss": 6.7194, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.8068535825545172e-05, |
|
"loss": 6.6456, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7990654205607478e-05, |
|
"loss": 6.7717, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7912772585669783e-05, |
|
"loss": 6.7982, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7834890965732086e-05, |
|
"loss": 6.7254, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.775700934579439e-05, |
|
"loss": 6.6927, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7679127725856697e-05, |
|
"loss": 6.5793, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7601246105919003e-05, |
|
"loss": 6.7343, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.752336448598131e-05, |
|
"loss": 6.6936, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7445482866043614e-05, |
|
"loss": 6.6261, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.736760124610592e-05, |
|
"loss": 6.8176, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7289719626168225e-05, |
|
"loss": 6.7827, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.721183800623053e-05, |
|
"loss": 6.7919, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7133956386292833e-05, |
|
"loss": 6.7903, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.705607476635514e-05, |
|
"loss": 6.9781, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6978193146417445e-05, |
|
"loss": 6.7162, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.690031152647975e-05, |
|
"loss": 6.5349, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6822429906542056e-05, |
|
"loss": 6.4349, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6744548286604362e-05, |
|
"loss": 6.6495, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 6.5716, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6588785046728973e-05, |
|
"loss": 6.7472, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6510903426791275e-05, |
|
"loss": 6.7143, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.643302180685358e-05, |
|
"loss": 6.8029, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6355140186915887e-05, |
|
"loss": 6.8607, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6277258566978192e-05, |
|
"loss": 6.9013, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6199376947040498e-05, |
|
"loss": 6.7816, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6121495327102804e-05, |
|
"loss": 6.5974, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.604361370716511e-05, |
|
"loss": 6.6809, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5965732087227415e-05, |
|
"loss": 6.7664, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.588785046728972e-05, |
|
"loss": 6.6815, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5809968847352023e-05, |
|
"loss": 6.6994, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.573208722741433e-05, |
|
"loss": 6.7056, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5654205607476634e-05, |
|
"loss": 6.7295, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.557632398753894e-05, |
|
"loss": 6.6198, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5498442367601246e-05, |
|
"loss": 6.6817, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.542056074766355e-05, |
|
"loss": 6.4976, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5342679127725857e-05, |
|
"loss": 6.4652, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5264797507788163e-05, |
|
"loss": 6.735, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.5186915887850467e-05, |
|
"loss": 6.6047, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.5109034267912772e-05, |
|
"loss": 6.5484, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.5031152647975078e-05, |
|
"loss": 6.6567, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4953271028037382e-05, |
|
"loss": 6.872, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4875389408099688e-05, |
|
"loss": 6.3103, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4797507788161993e-05, |
|
"loss": 6.9031, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4719626168224299e-05, |
|
"loss": 6.5827, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4641744548286603e-05, |
|
"loss": 6.9098, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4563862928348909e-05, |
|
"loss": 6.9134, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4485981308411214e-05, |
|
"loss": 6.6141, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.440809968847352e-05, |
|
"loss": 6.7544, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4330218068535826e-05, |
|
"loss": 6.8258, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.425233644859813e-05, |
|
"loss": 6.6041, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4174454828660435e-05, |
|
"loss": 6.8996, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4096573208722741e-05, |
|
"loss": 6.752, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.4018691588785047e-05, |
|
"loss": 6.8366, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.394080996884735e-05, |
|
"loss": 6.8569, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3862928348909656e-05, |
|
"loss": 6.895, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.3785046728971962e-05, |
|
"loss": 6.703, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3707165109034268e-05, |
|
"loss": 6.7691, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3629283489096573e-05, |
|
"loss": 6.6747, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3551401869158877e-05, |
|
"loss": 6.765, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3473520249221183e-05, |
|
"loss": 6.7562, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3395638629283489e-05, |
|
"loss": 6.7024, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3317757009345794e-05, |
|
"loss": 6.7755, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3239875389408098e-05, |
|
"loss": 6.9006, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3161993769470404e-05, |
|
"loss": 6.6809, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.308411214953271e-05, |
|
"loss": 6.7764, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.3006230529595015e-05, |
|
"loss": 6.4869, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.292834890965732e-05, |
|
"loss": 6.8597, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2850467289719625e-05, |
|
"loss": 6.9527, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.277258566978193e-05, |
|
"loss": 6.8702, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2694704049844236e-05, |
|
"loss": 6.6271, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2616822429906542e-05, |
|
"loss": 6.7808, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2538940809968846e-05, |
|
"loss": 6.661, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2461059190031153e-05, |
|
"loss": 6.721, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.2383177570093459e-05, |
|
"loss": 7.0044, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.2305295950155763e-05, |
|
"loss": 6.5281, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.2227414330218069e-05, |
|
"loss": 6.7983, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.2149532710280374e-05, |
|
"loss": 6.7392, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.207165109034268e-05, |
|
"loss": 6.6986, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1993769470404986e-05, |
|
"loss": 6.5987, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.191588785046729e-05, |
|
"loss": 6.8677, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1838006230529595e-05, |
|
"loss": 6.5819, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1760124610591901e-05, |
|
"loss": 6.6685, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1682242990654207e-05, |
|
"loss": 6.7633, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.160436137071651e-05, |
|
"loss": 6.5929, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1526479750778816e-05, |
|
"loss": 6.6485, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1448598130841122e-05, |
|
"loss": 6.5444, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1370716510903428e-05, |
|
"loss": 6.7383, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1292834890965732e-05, |
|
"loss": 6.6292, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1214953271028037e-05, |
|
"loss": 6.437, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1137071651090343e-05, |
|
"loss": 6.7836, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1059190031152649e-05, |
|
"loss": 6.6059, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.0981308411214954e-05, |
|
"loss": 6.0364, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0903426791277258e-05, |
|
"loss": 6.7167, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0825545171339564e-05, |
|
"loss": 6.757, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.074766355140187e-05, |
|
"loss": 6.7986, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0669781931464175e-05, |
|
"loss": 6.657, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.059190031152648e-05, |
|
"loss": 6.5522, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0514018691588785e-05, |
|
"loss": 6.7417, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.043613707165109e-05, |
|
"loss": 6.8712, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0358255451713396e-05, |
|
"loss": 6.631, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.02803738317757e-05, |
|
"loss": 6.5562, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0202492211838006e-05, |
|
"loss": 6.7917, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0124610591900312e-05, |
|
"loss": 6.6893, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0046728971962617e-05, |
|
"loss": 6.5926, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.968847352024923e-06, |
|
"loss": 6.5844, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.890965732087227e-06, |
|
"loss": 6.8132, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.813084112149533e-06, |
|
"loss": 6.8008, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.735202492211838e-06, |
|
"loss": 6.7277, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.657320872274144e-06, |
|
"loss": 6.8007, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.579439252336448e-06, |
|
"loss": 6.6529, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.501557632398754e-06, |
|
"loss": 6.6321, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.42367601246106e-06, |
|
"loss": 6.8933, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.345794392523365e-06, |
|
"loss": 6.7081, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.267912772585669e-06, |
|
"loss": 6.9494, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.190031152647975e-06, |
|
"loss": 6.7644, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.11214953271028e-06, |
|
"loss": 6.9127, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 9.034267912772586e-06, |
|
"loss": 7.0531, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.956386292834892e-06, |
|
"loss": 6.5159, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.878504672897196e-06, |
|
"loss": 6.5938, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.800623052959501e-06, |
|
"loss": 6.6733, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.722741433021807e-06, |
|
"loss": 6.7465, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.644859813084113e-06, |
|
"loss": 6.7056, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.566978193146417e-06, |
|
"loss": 6.5569, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.489096573208722e-06, |
|
"loss": 6.4809, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.411214953271028e-06, |
|
"loss": 6.7494, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.333333333333334e-06, |
|
"loss": 6.6975, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 8.255451713395638e-06, |
|
"loss": 6.8373, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.177570093457943e-06, |
|
"loss": 6.7578, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.099688473520249e-06, |
|
"loss": 6.7829, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 8.021806853582555e-06, |
|
"loss": 6.9281, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.94392523364486e-06, |
|
"loss": 6.8377, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.866043613707164e-06, |
|
"loss": 6.5166, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.78816199376947e-06, |
|
"loss": 6.8095, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.710280373831776e-06, |
|
"loss": 6.7521, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.632398753894081e-06, |
|
"loss": 6.8423, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.554517133956386e-06, |
|
"loss": 6.7741, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.476635514018691e-06, |
|
"loss": 6.745, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.398753894080997e-06, |
|
"loss": 6.5862, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.3208722741433015e-06, |
|
"loss": 6.3854, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.242990654205607e-06, |
|
"loss": 6.5175, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.165109034267913e-06, |
|
"loss": 6.4162, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.087227414330218e-06, |
|
"loss": 6.4625, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 7.009345794392523e-06, |
|
"loss": 6.8769, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.931464174454828e-06, |
|
"loss": 6.6893, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 6.853582554517134e-06, |
|
"loss": 6.826, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.775700934579439e-06, |
|
"loss": 6.8455, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.697819314641744e-06, |
|
"loss": 6.787, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.619937694704049e-06, |
|
"loss": 6.7078, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.542056074766355e-06, |
|
"loss": 6.7412, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.46417445482866e-06, |
|
"loss": 6.8571, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.386292834890965e-06, |
|
"loss": 6.9124, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.308411214953271e-06, |
|
"loss": 7.0675, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.230529595015577e-06, |
|
"loss": 6.5644, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.1526479750778815e-06, |
|
"loss": 6.8401, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.074766355140187e-06, |
|
"loss": 6.9041, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.996884735202493e-06, |
|
"loss": 6.7827, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.919003115264798e-06, |
|
"loss": 6.6781, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.841121495327103e-06, |
|
"loss": 6.8104, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.763239875389408e-06, |
|
"loss": 7.0362, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.685358255451714e-06, |
|
"loss": 6.9074, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.607476635514019e-06, |
|
"loss": 6.5858, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.529595015576324e-06, |
|
"loss": 6.9519, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 5.451713395638629e-06, |
|
"loss": 6.665, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.373831775700935e-06, |
|
"loss": 6.6528, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.29595015576324e-06, |
|
"loss": 6.8303, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.218068535825545e-06, |
|
"loss": 6.7899, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.14018691588785e-06, |
|
"loss": 6.8034, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.062305295950156e-06, |
|
"loss": 6.4999, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.9844236760124615e-06, |
|
"loss": 6.7517, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.906542056074766e-06, |
|
"loss": 6.8789, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.828660436137072e-06, |
|
"loss": 6.8491, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.750778816199377e-06, |
|
"loss": 6.7971, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.6728971962616825e-06, |
|
"loss": 6.5306, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.595015576323987e-06, |
|
"loss": 6.6106, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.517133956386293e-06, |
|
"loss": 6.6273, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.439252336448598e-06, |
|
"loss": 6.7694, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.3613707165109035e-06, |
|
"loss": 6.8861, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.283489096573208e-06, |
|
"loss": 6.832, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.205607476635514e-06, |
|
"loss": 6.443, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.127725856697819e-06, |
|
"loss": 6.8272, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.0498442367601245e-06, |
|
"loss": 6.7221, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.97196261682243e-06, |
|
"loss": 6.7032, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.894080996884735e-06, |
|
"loss": 6.6335, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.816199376947041e-06, |
|
"loss": 6.7585, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.7383177570093455e-06, |
|
"loss": 6.6755, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.6604361370716508e-06, |
|
"loss": 6.4035, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.5825545171339564e-06, |
|
"loss": 6.4804, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.5046728971962617e-06, |
|
"loss": 6.6317, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.426791277258567e-06, |
|
"loss": 6.6527, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.348909657320872e-06, |
|
"loss": 6.3416, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.2710280373831774e-06, |
|
"loss": 6.3009, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.1931464174454827e-06, |
|
"loss": 6.4027, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.1152647975077884e-06, |
|
"loss": 6.6928, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.0373831775700936e-06, |
|
"loss": 6.5945, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.959501557632399e-06, |
|
"loss": 6.9042, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.881619937694704e-06, |
|
"loss": 6.7598, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.8037383177570094e-06, |
|
"loss": 7.0008, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7258566978193146e-06, |
|
"loss": 6.554, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.64797507788162e-06, |
|
"loss": 6.5825, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.570093457943925e-06, |
|
"loss": 6.635, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.4922118380062308e-06, |
|
"loss": 6.9067, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.414330218068536e-06, |
|
"loss": 6.8028, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.3364485981308413e-06, |
|
"loss": 6.8325, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.2585669781931465e-06, |
|
"loss": 6.6665, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.1806853582554518e-06, |
|
"loss": 6.753, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.102803738317757e-06, |
|
"loss": 6.8656, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.0249221183800623e-06, |
|
"loss": 6.5606, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.9470404984423675e-06, |
|
"loss": 6.7774, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.8691588785046728e-06, |
|
"loss": 6.6039, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7912772585669782e-06, |
|
"loss": 6.5504, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.7133956386292835e-06, |
|
"loss": 6.6879, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6355140186915887e-06, |
|
"loss": 6.6818, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5576323987538942e-06, |
|
"loss": 6.8449, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4797507788161994e-06, |
|
"loss": 6.6066, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.4018691588785047e-06, |
|
"loss": 6.7217, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.32398753894081e-06, |
|
"loss": 6.6996, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.2461059190031154e-06, |
|
"loss": 6.5485, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.1682242990654206e-06, |
|
"loss": 6.7448, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0903426791277259e-06, |
|
"loss": 6.5528, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0124610591900311e-06, |
|
"loss": 6.6893, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.345794392523364e-07, |
|
"loss": 6.8809, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.566978193146417e-07, |
|
"loss": 6.6042, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.788161993769471e-07, |
|
"loss": 6.4283, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 7.009345794392523e-07, |
|
"loss": 6.5139, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.230529595015577e-07, |
|
"loss": 6.2382, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.451713395638629e-07, |
|
"loss": 5.9313, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1784, |
|
"total_flos": 0.0, |
|
"train_loss": 6.858208657647462, |
|
"train_runtime": 8259.6855, |
|
"train_samples_per_second": 3.455, |
|
"train_steps_per_second": 0.216 |
|
} |
|
], |
|
"max_steps": 1784, |
|
"num_train_epochs": 1, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|