{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.984988524285094,
  "global_step": 4000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 2e-05,
      "loss": 1.4558,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.9999987833918285e-05,
      "loss": 1.4479,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.9999951335702733e-05,
      "loss": 1.3372,
      "step": 6
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.9999890505442158e-05,
      "loss": 1.3914,
      "step": 8
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.999980534328457e-05,
      "loss": 1.4222,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.999969584943719e-05,
      "loss": 1.4072,
      "step": 12
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.999956202416644e-05,
      "loss": 1.3195,
      "step": 14
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.999940386779794e-05,
      "loss": 1.348,
      "step": 16
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9999221380716526e-05,
      "loss": 1.3456,
      "step": 18
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9999014563366226e-05,
      "loss": 1.3545,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.999878341625027e-05,
      "loss": 1.2812,
      "step": 22
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9998527939931088e-05,
      "loss": 1.3375,
      "step": 24
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9998248135030315e-05,
      "loss": 1.323,
      "step": 26
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.999794400222877e-05,
      "loss": 1.4263,
      "step": 28
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9997615542266482e-05,
      "loss": 1.3663,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9997262755942653e-05,
      "loss": 1.3438,
      "step": 32
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.99968856441157e-05,
      "loss": 1.2855,
      "step": 34
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.999648420770321e-05,
      "loss": 1.2576,
      "step": 36
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9996058447681973e-05,
      "loss": 1.2811,
      "step": 38
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9995608365087945e-05,
      "loss": 1.2616,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.999513396101628e-05,
      "loss": 1.2319,
      "step": 42
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9994635236621306e-05,
      "loss": 1.3689,
      "step": 44
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9994112193116527e-05,
      "loss": 1.1436,
      "step": 46
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.999356483177462e-05,
      "loss": 1.2203,
      "step": 48
    },
    {
      "epoch": 0.02,
      "learning_rate": 1.9992993153927432e-05,
      "loss": 1.1449,
      "step": 50
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9992397160965984e-05,
      "loss": 1.2228,
      "step": 52
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.999177685434045e-05,
      "loss": 1.2316,
      "step": 54
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9991132235560174e-05,
      "loss": 1.3092,
      "step": 56
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9990463306193652e-05,
      "loss": 1.2577,
      "step": 58
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9989770067868536e-05,
      "loss": 1.1647,
      "step": 60
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.998905252227162e-05,
      "loss": 1.1908,
      "step": 62
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.998831067114885e-05,
      "loss": 1.314,
      "step": 64
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9987544516305313e-05,
      "loss": 1.1445,
      "step": 66
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.998675405960522e-05,
      "loss": 1.1633,
      "step": 68
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.9985939302971936e-05,
      "loss": 1.2699,
      "step": 70
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9985100248387936e-05,
      "loss": 1.2089,
      "step": 72
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9984236897894815e-05,
      "loss": 1.2043,
      "step": 74
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9983349253593302e-05,
      "loss": 1.2562,
      "step": 76
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9982437317643218e-05,
      "loss": 1.2421,
      "step": 78
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9981501092263502e-05,
      "loss": 1.2917,
      "step": 80
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9980540579732198e-05,
      "loss": 1.3397,
      "step": 82
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9979555782386435e-05,
      "loss": 1.2327,
      "step": 84
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9978546702622443e-05,
      "loss": 1.2871,
      "step": 86
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.997751334289553e-05,
      "loss": 1.2185,
      "step": 88
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.9976455705720084e-05,
      "loss": 1.3285,
      "step": 90
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.997537379366956e-05,
      "loss": 1.2261,
      "step": 92
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9974267609376494e-05,
      "loss": 1.2092,
      "step": 94
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9973137155532462e-05,
      "loss": 1.1791,
      "step": 96
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9971982434888106e-05,
      "loss": 1.2968,
      "step": 98
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9970803450253116e-05,
      "loss": 1.24,
      "step": 100
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.996960020449621e-05,
      "loss": 1.2806,
      "step": 102
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9968372700545145e-05,
      "loss": 1.2578,
      "step": 104
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9967120941386707e-05,
      "loss": 1.2295,
      "step": 106
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.99658449300667e-05,
      "loss": 1.1964,
      "step": 108
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.996454466968993e-05,
      "loss": 1.2623,
      "step": 110
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9963220163420215e-05,
      "loss": 1.1747,
      "step": 112
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.996187141448036e-05,
      "loss": 1.1561,
      "step": 114
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.996049842615217e-05,
      "loss": 1.2927,
      "step": 116
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.995910120177642e-05,
      "loss": 1.2256,
      "step": 118
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9957679744752858e-05,
      "loss": 1.2709,
      "step": 120
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9956234058540195e-05,
      "loss": 1.2031,
      "step": 122
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9954764146656103e-05,
      "loss": 1.1953,
      "step": 124
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9953270012677195e-05,
      "loss": 1.1442,
      "step": 126
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9951751660239015e-05,
      "loss": 1.1697,
      "step": 128
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.995020909303605e-05,
      "loss": 1.2153,
      "step": 130
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.99486423148217e-05,
      "loss": 1.1575,
      "step": 132
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9947051329408274e-05,
      "loss": 1.2231,
      "step": 134
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9945436140666984e-05,
      "loss": 1.171,
      "step": 136
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9943796752527932e-05,
      "loss": 1.2231,
      "step": 138
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9942133168980105e-05,
      "loss": 1.2854,
      "step": 140
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9940445394071358e-05,
      "loss": 1.2368,
      "step": 142
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.993873343190842e-05,
      "loss": 1.2302,
      "step": 144
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9936997286656855e-05,
      "loss": 1.2982,
      "step": 146
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9935236962541092e-05,
      "loss": 1.2573,
      "step": 148
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.9933452463844377e-05,
      "loss": 1.2922,
      "step": 150
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9931643794908774e-05,
      "loss": 1.1661,
      "step": 152
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.992981096013517e-05,
      "loss": 1.2003,
      "step": 154
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.992795396398325e-05,
      "loss": 1.1262,
      "step": 156
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9926072810971494e-05,
      "loss": 1.1949,
      "step": 158
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.992416750567714e-05,
      "loss": 1.1951,
      "step": 160
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9922238052736215e-05,
      "loss": 1.2822,
      "step": 162
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.99202844568435e-05,
      "loss": 1.1666,
      "step": 164
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9918306722752507e-05,
      "loss": 1.0392,
      "step": 166
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9916304855275498e-05,
      "loss": 1.2428,
      "step": 168
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9914278859283446e-05,
      "loss": 1.2222,
      "step": 170
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.991222873970604e-05,
      "loss": 1.1914,
      "step": 172
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9910154501531666e-05,
      "loss": 1.1314,
      "step": 174
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.990805614980739e-05,
      "loss": 1.1499,
      "step": 176
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9905933689638958e-05,
      "loss": 1.1887,
      "step": 178
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9903787126190774e-05,
      "loss": 1.302,
      "step": 180
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.990161646468589e-05,
      "loss": 1.2144,
      "step": 182
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9899421710405996e-05,
      "loss": 1.2339,
      "step": 184
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.989720286869141e-05,
      "loss": 1.1867,
      "step": 186
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9894959944941038e-05,
      "loss": 1.259,
      "step": 188
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.989269294461242e-05,
      "loss": 1.2476,
      "step": 190
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9890401873221642e-05,
      "loss": 1.1847,
      "step": 192
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9888086736343385e-05,
      "loss": 1.2072,
      "step": 194
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.988574753961087e-05,
      "loss": 1.17,
      "step": 196
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9883384288715876e-05,
      "loss": 1.1533,
      "step": 198
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.98809969894087e-05,
      "loss": 1.0659,
      "step": 200
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.987858564749816e-05,
      "loss": 1.1814,
      "step": 202
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9876150268851572e-05,
      "loss": 1.1722,
      "step": 204
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9873690859394738e-05,
      "loss": 1.2267,
      "step": 206
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.987120742511193e-05,
      "loss": 1.1396,
      "step": 208
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9868699972045892e-05,
      "loss": 1.0883,
      "step": 210
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.986616850629779e-05,
      "loss": 1.0839,
      "step": 212
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9863613034027224e-05,
      "loss": 1.2437,
      "step": 214
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9861033561452223e-05,
      "loss": 1.1318,
      "step": 216
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9858430094849196e-05,
      "loss": 1.1162,
      "step": 218
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9855802640552938e-05,
      "loss": 1.0525,
      "step": 220
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9853151204956617e-05,
      "loss": 1.136,
      "step": 222
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.985047579451175e-05,
      "loss": 1.0522,
      "step": 224
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9847776415728186e-05,
      "loss": 1.2369,
      "step": 226
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9845053075174102e-05,
      "loss": 1.1296,
      "step": 228
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.984230577947597e-05,
      "loss": 1.2401,
      "step": 230
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.983953453531856e-05,
      "loss": 1.2585,
      "step": 232
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.98367393494449e-05,
      "loss": 1.1647,
      "step": 234
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9833920228656295e-05,
      "loss": 1.1434,
      "step": 236
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.983107717981226e-05,
      "loss": 1.0576,
      "step": 238
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9828210209830564e-05,
      "loss": 1.124,
      "step": 240
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9825319325687157e-05,
      "loss": 1.129,
      "step": 242
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9822404534416183e-05,
      "loss": 1.0831,
      "step": 244
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9819465843109965e-05,
      "loss": 1.1666,
      "step": 246
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.981650325891897e-05,
      "loss": 1.1545,
      "step": 248
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.981351678905181e-05,
      "loss": 1.1523,
      "step": 250
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.981050644077521e-05,
      "loss": 1.178,
      "step": 252
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9807472221414002e-05,
      "loss": 1.1373,
      "step": 254
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9804414138351097e-05,
      "loss": 1.2085,
      "step": 256
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9801332199027468e-05,
      "loss": 1.1745,
      "step": 258
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9798226410942148e-05,
      "loss": 1.1953,
      "step": 260
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9795096781652184e-05,
      "loss": 1.1445,
      "step": 262
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9791943318772644e-05,
      "loss": 1.2227,
      "step": 264
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9788766029976587e-05,
      "loss": 1.1688,
      "step": 266
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9785564922995042e-05,
      "loss": 1.1893,
      "step": 268
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9782340005616995e-05,
      "loss": 1.1779,
      "step": 270
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.977909128568937e-05,
      "loss": 1.1489,
      "step": 272
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9775818771117e-05,
      "loss": 1.1362,
      "step": 274
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9772522469862628e-05,
      "loss": 1.1897,
      "step": 276
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9769202389946863e-05,
      "loss": 1.167,
      "step": 278
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.976585853944818e-05,
      "loss": 1.1824,
      "step": 280
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.976249092650289e-05,
      "loss": 1.1413,
      "step": 282
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9759099559305124e-05,
      "loss": 1.1875,
      "step": 284
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9755684446106813e-05,
      "loss": 1.1451,
      "step": 286
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9752245595217662e-05,
      "loss": 1.0607,
      "step": 288
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9748783015005144e-05,
      "loss": 1.2404,
      "step": 290
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9745296713894463e-05,
      "loss": 1.1807,
      "step": 292
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.974178670036855e-05,
      "loss": 1.0758,
      "step": 294
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.973825298296802e-05,
      "loss": 1.0989,
      "step": 296
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.973469557029117e-05,
      "loss": 1.1169,
      "step": 298
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.973111447099396e-05,
      "loss": 1.1579,
      "step": 300
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.972750969378998e-05,
      "loss": 1.1099,
      "step": 302
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9723881247450434e-05,
      "loss": 1.2141,
      "step": 304
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.972022914080411e-05,
      "loss": 1.2618,
      "step": 306
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.971655338273738e-05,
      "loss": 1.2097,
      "step": 308
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9712853982194154e-05,
      "loss": 1.2092,
      "step": 310
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9709130948175877e-05,
      "loss": 1.0878,
      "step": 312
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9705384289741493e-05,
      "loss": 1.1637,
      "step": 314
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9701614016007435e-05,
      "loss": 1.1152,
      "step": 316
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9697820136147598e-05,
      "loss": 1.0662,
      "step": 318
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9694002659393306e-05,
      "loss": 1.2159,
      "step": 320
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.969016159503331e-05,
      "loss": 1.1588,
      "step": 322
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9686296952413747e-05,
      "loss": 1.1617,
      "step": 324
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9682408740938132e-05,
      "loss": 1.1422,
      "step": 326
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9678496970067326e-05,
      "loss": 1.1713,
      "step": 328
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.967456164931951e-05,
      "loss": 1.0971,
      "step": 330
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9670602788270172e-05,
      "loss": 1.1238,
      "step": 332
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9666620396552075e-05,
      "loss": 1.2025,
      "step": 334
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9662614483855247e-05,
      "loss": 1.0267,
      "step": 336
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9658585059926935e-05,
      "loss": 1.1763,
      "step": 338
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9654532134571594e-05,
      "loss": 1.1421,
      "step": 340
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9650455717650878e-05,
      "loss": 1.1594,
      "step": 342
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.964635581908359e-05,
      "loss": 1.1858,
      "step": 344
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9642232448845663e-05,
      "loss": 1.2112,
      "step": 346
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9638085616970152e-05,
      "loss": 1.1302,
      "step": 348
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9633915333547202e-05,
      "loss": 1.1993,
      "step": 350
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9629721608724006e-05,
      "loss": 1.0864,
      "step": 352
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.962550445270481e-05,
      "loss": 1.0865,
      "step": 354
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9621263875750864e-05,
      "loss": 1.0852,
      "step": 356
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9616999888180408e-05,
      "loss": 1.2186,
      "step": 358
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9612712500368653e-05,
      "loss": 1.092,
      "step": 360
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.960840172274773e-05,
      "loss": 1.1371,
      "step": 362
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9604067565806707e-05,
      "loss": 1.1527,
      "step": 364
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.959971004009151e-05,
      "loss": 1.1696,
      "step": 366
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9595329156204955e-05,
      "loss": 1.1942,
      "step": 368
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9590924924806677e-05,
      "loss": 1.2083,
      "step": 370
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.958649735661312e-05,
      "loss": 1.1935,
      "step": 372
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9582046462397514e-05,
      "loss": 1.1874,
      "step": 374
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9577572252989856e-05,
      "loss": 1.1034,
      "step": 376
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9573074739276858e-05,
      "loss": 1.175,
      "step": 378
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9568553932201947e-05,
      "loss": 1.1102,
      "step": 380
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9564009842765225e-05,
      "loss": 1.1398,
      "step": 382
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9559442482023444e-05,
      "loss": 1.0146,
      "step": 384
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.955485186108998e-05,
      "loss": 1.1453,
      "step": 386
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9550237991134806e-05,
      "loss": 1.1453,
      "step": 388
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.954560088338447e-05,
      "loss": 1.1533,
      "step": 390
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.954094054912205e-05,
      "loss": 1.0356,
      "step": 392
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9536256999687157e-05,
      "loss": 1.0178,
      "step": 394
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9531550246475875e-05,
      "loss": 1.087,
      "step": 396
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9526820300940756e-05,
      "loss": 1.0517,
      "step": 398
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9522067174590777e-05,
      "loss": 1.0938,
      "step": 400
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9517290878991324e-05,
      "loss": 1.0381,
      "step": 402
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.951249142576416e-05,
      "loss": 1.0585,
      "step": 404
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.950766882658739e-05,
      "loss": 1.1217,
      "step": 406
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.950282309319544e-05,
      "loss": 1.2144,
      "step": 408
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.949795423737903e-05,
      "loss": 1.1815,
      "step": 410
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9493062270985143e-05,
      "loss": 1.167,
      "step": 412
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9488147205916985e-05,
      "loss": 1.1621,
      "step": 414
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9483209054133975e-05,
      "loss": 1.1437,
      "step": 416
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.947824782765171e-05,
      "loss": 1.1501,
      "step": 418
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9473263538541916e-05,
      "loss": 1.1792,
      "step": 420
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9468256198932455e-05,
      "loss": 1.1888,
      "step": 422
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.946322582100727e-05,
      "loss": 1.2405,
      "step": 424
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9458172417006347e-05,
      "loss": 1.1984,
      "step": 426
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9453095999225725e-05,
      "loss": 1.1406,
      "step": 428
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9447996580017418e-05,
      "loss": 1.2371,
      "step": 430
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9442874171789417e-05,
      "loss": 1.0566,
      "step": 432
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9437728787005655e-05,
      "loss": 1.0957,
      "step": 434
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9432560438185964e-05,
      "loss": 1.1853,
      "step": 436
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9427369137906046e-05,
      "loss": 1.0938,
      "step": 438
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.942215489879747e-05,
      "loss": 1.1975,
      "step": 440
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9416917733547605e-05,
      "loss": 1.1953,
      "step": 442
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9411657654899598e-05,
      "loss": 1.1901,
      "step": 444
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.940637467565237e-05,
      "loss": 1.1848,
      "step": 446
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9401068808660544e-05,
      "loss": 1.1822,
      "step": 448
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.939574006683445e-05,
      "loss": 1.1888,
      "step": 450
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9390388463140065e-05,
      "loss": 1.1284,
      "step": 452
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9385014010598998e-05,
      "loss": 1.1155,
      "step": 454
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9379616722288457e-05,
      "loss": 1.1359,
      "step": 456
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9374196611341212e-05,
      "loss": 1.2017,
      "step": 458
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.936875369094556e-05,
      "loss": 1.1825,
      "step": 460
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9363287974345312e-05,
      "loss": 1.0658,
      "step": 462
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9357799474839735e-05,
      "loss": 1.1118,
      "step": 464
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.935228820578354e-05,
      "loss": 1.1981,
      "step": 466
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9346754180586825e-05,
      "loss": 1.1582,
      "step": 468
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.934119741271508e-05,
      "loss": 1.0506,
      "step": 470
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.933561791568913e-05,
      "loss": 1.1329,
      "step": 472
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9330015703085082e-05,
      "loss": 1.1007,
      "step": 474
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9324390788534345e-05,
      "loss": 1.064,
      "step": 476
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9318743185723544e-05,
      "loss": 1.084,
      "step": 478
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9313072908394524e-05,
      "loss": 1.148,
      "step": 480
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9307379970344295e-05,
      "loss": 1.0327,
      "step": 482
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9301664385425006e-05,
      "loss": 1.0734,
      "step": 484
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9295926167543914e-05,
      "loss": 1.1937,
      "step": 486
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9290165330663338e-05,
      "loss": 1.1191,
      "step": 488
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9284381888800646e-05,
      "loss": 1.1106,
      "step": 490
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9278575856028207e-05,
      "loss": 0.9996,
      "step": 492
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9272747246473345e-05,
      "loss": 1.1063,
      "step": 494
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9266896074318335e-05,
      "loss": 1.0836,
      "step": 496
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9261022353800344e-05,
      "loss": 1.0874,
      "step": 498
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9255126099211403e-05,
      "loss": 1.1903,
      "step": 500
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9249207324898377e-05,
      "loss": 1.2297,
      "step": 502
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.924326604526292e-05,
      "loss": 1.1173,
      "step": 504
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9237302274761457e-05,
      "loss": 1.1311,
      "step": 506
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9231316027905128e-05,
      "loss": 1.2332,
      "step": 508
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.922530731925977e-05,
      "loss": 0.9706,
      "step": 510
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9219276163445865e-05,
      "loss": 1.1958,
      "step": 512
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.921322257513852e-05,
      "loss": 1.0844,
      "step": 514
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9207146569067438e-05,
      "loss": 1.0564,
      "step": 516
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.920104816001684e-05,
      "loss": 1.1932,
      "step": 518
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.919492736282548e-05,
      "loss": 1.0589,
      "step": 520
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9188784192386588e-05,
      "loss": 1.1334,
      "step": 522
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9182618663647815e-05,
      "loss": 1.1622,
      "step": 524
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.917643079161124e-05,
      "loss": 1.1118,
      "step": 526
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9170220591333283e-05,
      "loss": 1.0058,
      "step": 528
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9163988077924712e-05,
      "loss": 1.1155,
      "step": 530
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9157733266550577e-05,
      "loss": 1.078,
      "step": 532
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9151456172430186e-05,
      "loss": 1.0551,
      "step": 534
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9145156810837072e-05,
      "loss": 1.1194,
      "step": 536
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.913883519709894e-05,
      "loss": 1.1368,
      "step": 538
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9132491346597644e-05,
      "loss": 1.2772,
      "step": 540
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9126125274769148e-05,
      "loss": 1.2117,
      "step": 542
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9119736997103476e-05,
      "loss": 1.1841,
      "step": 544
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.91133265291447e-05,
      "loss": 1.0742,
      "step": 546
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9106893886490865e-05,
      "loss": 1.0796,
      "step": 548
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.910043908479399e-05,
      "loss": 1.0079,
      "step": 550
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.909396213976e-05,
      "loss": 1.088,
      "step": 552
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9087463067148698e-05,
      "loss": 1.0243,
      "step": 554
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9080941882773744e-05,
      "loss": 1.2952,
      "step": 556
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9074398602502585e-05,
      "loss": 1.1602,
      "step": 558
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.906783324225644e-05,
      "loss": 1.1614,
      "step": 560
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.906124581801025e-05,
      "loss": 1.1066,
      "step": 562
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.905463634579264e-05,
      "loss": 1.115,
      "step": 564
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9048004841685888e-05,
      "loss": 1.1365,
      "step": 566
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9041351321825882e-05,
      "loss": 1.1183,
      "step": 568
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.903467580240207e-05,
      "loss": 1.1091,
      "step": 570
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9027978299657436e-05,
      "loss": 1.1599,
      "step": 572
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9021258829888457e-05,
      "loss": 1.225,
      "step": 574
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9014517409445053e-05,
      "loss": 1.0121,
      "step": 576
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9007754054730555e-05,
      "loss": 1.0556,
      "step": 578
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9000968782201678e-05,
      "loss": 1.0731,
      "step": 580
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.899416160836845e-05,
      "loss": 1.1136,
      "step": 582
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8987332549794196e-05,
      "loss": 1.0963,
      "step": 584
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8980481623095502e-05,
      "loss": 1.1663,
      "step": 586
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8973608844942147e-05,
      "loss": 1.1445,
      "step": 588
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.8966714232057094e-05,
      "loss": 1.1259,
      "step": 590
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.895979780121642e-05,
      "loss": 1.1379,
      "step": 592
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.89528595692493e-05,
      "loss": 1.0709,
      "step": 594
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8945899553037957e-05,
      "loss": 1.0895,
      "step": 596
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8938917769517614e-05,
      "loss": 1.0493,
      "step": 598
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8931914235676457e-05,
      "loss": 0.9594,
      "step": 600
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.892488896855561e-05,
      "loss": 1.1143,
      "step": 602
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8917841985249055e-05,
      "loss": 1.2822,
      "step": 604
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8910773302903632e-05,
      "loss": 1.0581,
      "step": 606
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8903682938718976e-05,
      "loss": 1.1096,
      "step": 608
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8896570909947477e-05,
      "loss": 1.1439,
      "step": 610
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.8889437233894234e-05,
      "loss": 1.0356,
      "step": 612
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.888228192791703e-05,
      "loss": 1.0795,
      "step": 614
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8875105009426272e-05,
      "loss": 1.1792,
      "step": 616
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8867906495884955e-05,
      "loss": 1.1395,
      "step": 618
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8860686404808618e-05,
      "loss": 1.0511,
      "step": 620
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8853444753765308e-05,
      "loss": 1.0585,
      "step": 622
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8846181560375525e-05,
      "loss": 1.1337,
      "step": 624
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.883889684231219e-05,
      "loss": 1.0908,
      "step": 626
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.88315906173006e-05,
      "loss": 1.1562,
      "step": 628
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.882426290311838e-05,
      "loss": 0.9822,
      "step": 630
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8816913717595445e-05,
      "loss": 1.0973,
      "step": 632
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.8809543078613956e-05,
      "loss": 1.0825,
      "step": 634
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8802151004108266e-05,
      "loss": 1.1721,
      "step": 636
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.879473751206489e-05,
      "loss": 1.0896,
      "step": 638
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8787302620522467e-05,
      "loss": 1.1183,
      "step": 640
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8779846347571695e-05,
      "loss": 1.1766,
      "step": 642
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.877236871135529e-05,
      "loss": 1.08,
      "step": 644
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.876486973006797e-05,
      "loss": 1.1069,
      "step": 646
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.875734942195637e-05,
      "loss": 1.1172,
      "step": 648
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.874980780531903e-05,
      "loss": 1.0455,
      "step": 650
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.874224489850634e-05,
      "loss": 1.2435,
      "step": 652
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.8734660719920477e-05,
      "loss": 1.1201,
      "step": 654
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.87270552880154e-05,
      "loss": 0.9779,
      "step": 656
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8719428621296765e-05,
      "loss": 1.1465,
      "step": 658
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8711780738321897e-05,
      "loss": 1.1176,
      "step": 660
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8704111657699758e-05,
      "loss": 1.1217,
      "step": 662
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.869642139809088e-05,
      "loss": 1.0776,
      "step": 664
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8688709978207323e-05,
      "loss": 1.02,
      "step": 666
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8680977416812645e-05,
      "loss": 1.0624,
      "step": 668
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.867322373272184e-05,
      "loss": 1.1606,
      "step": 670
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.866544894480129e-05,
      "loss": 1.1472,
      "step": 672
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.8657653071968748e-05,
      "loss": 1.1931,
      "step": 674
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.864983613319325e-05,
      "loss": 1.1593,
      "step": 676
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8641998147495114e-05,
      "loss": 1.0349,
      "step": 678
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8634139133945838e-05,
      "loss": 1.0645,
      "step": 680
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8626259111668107e-05,
      "loss": 1.0791,
      "step": 682
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8618358099835724e-05,
      "loss": 1.024,
      "step": 684
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8610436117673557e-05,
      "loss": 1.0797,
      "step": 686
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8602493184457505e-05,
      "loss": 1.1108,
      "step": 688
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8594529319514437e-05,
      "loss": 1.1057,
      "step": 690
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.858654454222217e-05,
      "loss": 1.0278,
      "step": 692
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.8578538872009383e-05,
      "loss": 0.9993,
      "step": 694
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8570512328355613e-05,
      "loss": 1.05,
      "step": 696
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.856246493079117e-05,
      "loss": 1.0813,
      "step": 698
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8554396698897116e-05,
      "loss": 1.0712,
      "step": 700
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8546307652305207e-05,
      "loss": 1.0891,
      "step": 702
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8538197810697843e-05,
      "loss": 1.0363,
      "step": 704
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.853006719380802e-05,
      "loss": 1.0649,
      "step": 706
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8521915821419285e-05,
      "loss": 1.156,
      "step": 708
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.85137437133657e-05,
      "loss": 1.1517,
      "step": 710
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8505550889531766e-05,
      "loss": 1.0265,
      "step": 712
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.8497337369852397e-05,
      "loss": 1.1985,
      "step": 714
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.848910317431286e-05,
      "loss": 1.0552,
      "step": 716
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.848084832294874e-05,
      "loss": 1.253,
      "step": 718
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.847257283584587e-05,
      "loss": 1.1016,
      "step": 720
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8464276733140307e-05,
      "loss": 1.1403,
      "step": 722
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.845596003501826e-05,
      "loss": 1.2197,
      "step": 724
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8447622761716058e-05,
      "loss": 0.9938,
      "step": 726
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8439264933520086e-05,
      "loss": 1.0355,
      "step": 728
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.843088657076675e-05,
      "loss": 1.1461,
      "step": 730
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.842248769384242e-05,
      "loss": 1.0829,
      "step": 732
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.8414068323183378e-05,
      "loss": 1.1194,
      "step": 734
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8405628479275777e-05,
      "loss": 1.0746,
      "step": 736
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8397168182655584e-05,
      "loss": 1.0358,
      "step": 738
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8388687453908527e-05,
      "loss": 1.1372,
      "step": 740
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.838018631367006e-05,
      "loss": 1.2092,
      "step": 742
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8371664782625287e-05,
      "loss": 1.0762,
      "step": 744
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8363122881508947e-05,
      "loss": 1.0928,
      "step": 746
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8354560631105326e-05,
      "loss": 1.2407,
      "step": 748
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8345978052248235e-05,
      "loss": 1.1196,
      "step": 750
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.8337375165820943e-05,
      "loss": 1.0321,
      "step": 752
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.832875199275614e-05,
      "loss": 1.1741,
      "step": 754
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.832010855403586e-05,
      "loss": 1.1316,
      "step": 756
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.831144487069147e-05,
      "loss": 1.0432,
      "step": 758
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8302760963803582e-05,
      "loss": 1.1426,
      "step": 760
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.829405685450202e-05,
      "loss": 1.0476,
      "step": 762
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8285332563965765e-05,
      "loss": 1.0797,
      "step": 764
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8276588113422903e-05,
      "loss": 0.9968,
      "step": 766
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8267823524150575e-05,
      "loss": 1.1027,
      "step": 768
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.8259038817474923e-05,
      "loss": 1.0366,
      "step": 770
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.825023401477104e-05,
      "loss": 1.1276,
      "step": 772
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.824140913746291e-05,
      "loss": 1.0907,
      "step": 774
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8232564207023377e-05,
      "loss": 1.1748,
      "step": 776
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8223699244974065e-05,
      "loss": 1.139,
      "step": 778
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8214814272885344e-05,
      "loss": 1.0471,
      "step": 780
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8205909312376277e-05,
      "loss": 1.0496,
      "step": 782
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8196984385114556e-05,
      "loss": 1.1727,
      "step": 784
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.818803951281646e-05,
      "loss": 1.0343,
      "step": 786
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.81790747172468e-05,
      "loss": 1.0835,
      "step": 788
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8170090020218866e-05,
      "loss": 1.0961,
      "step": 790
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.8161085443594367e-05,
      "loss": 1.0955,
      "step": 792
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.815206100928338e-05,
      "loss": 1.1442,
      "step": 794
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8143016739244314e-05,
      "loss": 1.1664,
      "step": 796
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8133952655483834e-05,
      "loss": 1.045,
      "step": 798
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8124868780056814e-05,
      "loss": 1.1095,
      "step": 800
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.811576513506629e-05,
      "loss": 1.0906,
      "step": 802
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8106641742663397e-05,
      "loss": 1.0254,
      "step": 804
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.809749862504733e-05,
      "loss": 1.1217,
      "step": 806
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.808833580446526e-05,
      "loss": 1.1943,
      "step": 808
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.807915330321232e-05,
      "loss": 1.1309,
      "step": 810
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.806995114363152e-05,
      "loss": 1.1422,
      "step": 812
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8060729348113706e-05,
      "loss": 1.0663,
      "step": 814
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.8051487939097506e-05,
      "loss": 1.1041,
      "step": 816
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8042226939069256e-05,
      "loss": 1.1333,
      "step": 818
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8032946370562983e-05,
      "loss": 1.1051,
      "step": 820
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.8023646256160314e-05,
      "loss": 1.051,
      "step": 822
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.801432661849044e-05,
      "loss": 1.0166,
      "step": 824
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.800498748023005e-05,
      "loss": 1.0703,
      "step": 826
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.79956288641033e-05,
      "loss": 1.0842,
      "step": 828
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.798625079288172e-05,
      "loss": 1.1841,
      "step": 830
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7976853289384184e-05,
      "loss": 1.1714,
      "step": 832
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.7967436376476857e-05,
      "loss": 1.1438,
      "step": 834
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.795800007707312e-05,
      "loss": 1.0752,
      "step": 836
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7948544414133534e-05,
      "loss": 1.1434,
      "step": 838
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7939069410665772e-05,
      "loss": 1.1209,
      "step": 840
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.792957508972457e-05,
      "loss": 0.9802,
      "step": 842
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.792006147441166e-05,
      "loss": 1.085,
      "step": 844
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.791052858787573e-05,
      "loss": 1.2046,
      "step": 846
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.790097645331235e-05,
      "loss": 1.1376,
      "step": 848
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.789140509396394e-05,
      "loss": 1.0742,
      "step": 850
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7881814533119678e-05,
      "loss": 1.029,
      "step": 852
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.7872204794115474e-05,
      "loss": 1.1351,
      "step": 854
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.786257590033391e-05,
      "loss": 1.1188,
      "step": 856
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7852927875204162e-05,
      "loss": 1.097,
      "step": 858
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7843260742201964e-05,
      "loss": 1.1078,
      "step": 860
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7833574524849536e-05,
      "loss": 1.1975,
      "step": 862
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7823869246715553e-05,
      "loss": 1.0818,
      "step": 864
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7814144931415043e-05,
      "loss": 1.0735,
      "step": 866
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.780440160260938e-05,
      "loss": 1.0704,
      "step": 868
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7794639284006184e-05,
      "loss": 1.0148,
      "step": 870
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7784857999359292e-05,
      "loss": 1.1402,
      "step": 872
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.777505777246868e-05,
      "loss": 1.0834,
      "step": 874
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.7765238627180424e-05,
      "loss": 1.1154,
      "step": 876
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.775540058738663e-05,
      "loss": 1.0593,
      "step": 878
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.774554367702538e-05,
      "loss": 1.061,
      "step": 880
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7735667920080662e-05,
      "loss": 0.9858,
      "step": 882
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.772577334058233e-05,
      "loss": 1.0783,
      "step": 884
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7715859962606045e-05,
      "loss": 1.1001,
      "step": 886
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7705927810273188e-05,
      "loss": 1.1298,
      "step": 888
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7695976907750846e-05,
      "loss": 1.0907,
      "step": 890
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7686007279251708e-05,
      "loss": 1.0962,
      "step": 892
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.7676018949034045e-05,
      "loss": 1.017,
      "step": 894
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.766601194140162e-05,
      "loss": 1.1945,
      "step": 896
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.765598628070365e-05,
      "loss": 1.0868,
      "step": 898
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7645941991334732e-05,
      "loss": 0.9812,
      "step": 900
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7635879097734806e-05,
      "loss": 1.0974,
      "step": 902
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7625797624389055e-05,
      "loss": 1.1943,
      "step": 904
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7615697595827898e-05,
      "loss": 1.0878,
      "step": 906
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.760557903662688e-05,
      "loss": 1.0845,
      "step": 908
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7595441971406647e-05,
      "loss": 1.0385,
      "step": 910
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7585286424832876e-05,
      "loss": 1.0956,
      "step": 912
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7575112421616203e-05,
      "loss": 0.9226,
      "step": 914
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.7564919986512183e-05,
      "loss": 0.9761,
      "step": 916
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7554709144321212e-05,
      "loss": 1.0568,
      "step": 918
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.754447991988848e-05,
      "loss": 1.0809,
      "step": 920
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7534232338103904e-05,
      "loss": 1.028,
      "step": 922
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.752396642390207e-05,
      "loss": 1.0931,
      "step": 924
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7513682202262163e-05,
      "loss": 1.0754,
      "step": 926
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.750337969820792e-05,
      "loss": 1.1067,
      "step": 928
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7493058936807562e-05,
      "loss": 1.1318,
      "step": 930
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.748271994317374e-05,
      "loss": 1.131,
      "step": 932
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7472362742463455e-05,
      "loss": 0.9948,
      "step": 934
    },
    {
      "epoch": 0.46,
      "learning_rate": 1.7461987359878022e-05,
      "loss": 1.0269,
      "step": 936
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7451593820662987e-05,
      "loss": 1.1522,
      "step": 938
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7441182150108088e-05,
      "loss": 0.9819,
      "step": 940
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.743075237354716e-05,
      "loss": 1.0245,
      "step": 942
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7420304516358113e-05,
      "loss": 1.0467,
      "step": 944
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7409838603962844e-05,
      "loss": 0.9151,
      "step": 946
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.739935466182718e-05,
      "loss": 1.0757,
      "step": 948
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.738885271546082e-05,
      "loss": 1.1047,
      "step": 950
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7378332790417275e-05,
      "loss": 1.0851,
      "step": 952
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.7367794912293794e-05,
      "loss": 1.0941,
      "step": 954
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.735723910673132e-05,
      "loss": 1.0764,
      "step": 956
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7346665399414404e-05,
      "loss": 1.0224,
      "step": 958
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7336073816071167e-05,
      "loss": 1.1149,
      "step": 960
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7325464382473228e-05,
      "loss": 1.1105,
      "step": 962
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7314837124435625e-05,
      "loss": 1.1125,
      "step": 964
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7304192067816782e-05,
      "loss": 1.0294,
      "step": 966
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7293529238518422e-05,
      "loss": 1.1923,
      "step": 968
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.728284866248552e-05,
      "loss": 1.107,
      "step": 970
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7272150365706225e-05,
      "loss": 1.0266,
      "step": 972
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.7261434374211804e-05,
      "loss": 1.0624,
      "step": 974
    },
    {
      "epoch": 0.48,
      "learning_rate": 1.725070071407659e-05,
      "loss": 1.1012,
      "step": 976
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.723994941141789e-05,
      "loss": 1.1323,
      "step": 978
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.722918049239596e-05,
      "loss": 1.0443,
      "step": 980
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7218393983213902e-05,
      "loss": 0.9999,
      "step": 982
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7207589910117634e-05,
      "loss": 1.15,
      "step": 984
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.71967682993958e-05,
      "loss": 1.058,
      "step": 986
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7185929177379714e-05,
      "loss": 1.0251,
      "step": 988
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.717507257044331e-05,
      "loss": 0.9785,
      "step": 990
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7164198505003068e-05,
      "loss": 0.9978,
      "step": 992
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.715330700751793e-05,
      "loss": 0.9861,
      "step": 994
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7142398104489274e-05,
      "loss": 1.1539,
      "step": 996
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7131471822460814e-05,
      "loss": 1.0574,
      "step": 998
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7120528188018566e-05,
      "loss": 1.1229,
      "step": 1000
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7109567227790754e-05,
      "loss": 1.1081,
      "step": 1002
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7098588968447768e-05,
      "loss": 0.9774,
      "step": 1004
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7087593436702086e-05,
      "loss": 1.0047,
      "step": 1006
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.707658065930822e-05,
      "loss": 1.0911,
      "step": 1008
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7065550663062634e-05,
      "loss": 0.9891,
      "step": 1010
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.7054503474803704e-05,
      "loss": 1.17,
      "step": 1012
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.704343912141162e-05,
      "loss": 1.106,
      "step": 1014
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.703235762980835e-05,
      "loss": 1.066,
      "step": 1016
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7021259026957566e-05,
      "loss": 1.1588,
      "step": 1018
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.7010143339864563e-05,
      "loss": 1.0991,
      "step": 1020
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6999010595576213e-05,
      "loss": 1.103,
      "step": 1022
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6987860821180896e-05,
      "loss": 1.196,
      "step": 1024
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.697669404380842e-05,
      "loss": 1.0892,
      "step": 1026
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6965510290629973e-05,
      "loss": 1.1086,
      "step": 1028
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6954309588858043e-05,
      "loss": 1.0873,
      "step": 1030
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.694309196574637e-05,
      "loss": 1.0697,
      "step": 1032
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6931857448589847e-05,
      "loss": 0.9517,
      "step": 1034
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6920606064724486e-05,
      "loss": 1.0223,
      "step": 1036
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6909337841527344e-05,
      "loss": 1.1926,
      "step": 1038
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6898052806416446e-05,
      "loss": 1.1018,
      "step": 1040
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6886750986850716e-05,
      "loss": 1.0406,
      "step": 1042
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6875432410329934e-05,
      "loss": 0.9755,
      "step": 1044
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.686409710439464e-05,
      "loss": 1.0135,
      "step": 1046
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.685274509662609e-05,
      "loss": 1.1118,
      "step": 1048
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.684137641464617e-05,
      "loss": 1.0095,
      "step": 1050
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.682999108611735e-05,
      "loss": 1.0496,
      "step": 1052
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6818589138742586e-05,
      "loss": 1.0831,
      "step": 1054
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.6807170600265296e-05,
      "loss": 1.2248,
      "step": 1056
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6795735498469245e-05,
      "loss": 1.0273,
      "step": 1058
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6784283861178514e-05,
      "loss": 1.0812,
      "step": 1060
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6772815716257414e-05,
      "loss": 1.0574,
      "step": 1062
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6761331091610418e-05,
      "loss": 1.1536,
      "step": 1064
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6749830015182106e-05,
      "loss": 1.0092,
      "step": 1066
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6738312514957087e-05,
      "loss": 1.0804,
      "step": 1068
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6726778618959928e-05,
      "loss": 1.09,
      "step": 1070
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.6715228355255093e-05,
      "loss": 1.1343,
      "step": 1072
    },
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6703661751946872e-05, |
|
"loss": 1.0558, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6692078837179317e-05, |
|
"loss": 1.0913, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.6680479639136164e-05, |
|
"loss": 1.0627, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.666886418604077e-05, |
|
"loss": 1.0714, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.665723250615604e-05, |
|
"loss": 1.1603, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6645584627784383e-05, |
|
"loss": 1.0104, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.663392057926759e-05, |
|
"loss": 1.1624, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6622240388986824e-05, |
|
"loss": 1.0206, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6610544085362513e-05, |
|
"loss": 1.0537, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.659883169685429e-05, |
|
"loss": 1.139, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6587103251960935e-05, |
|
"loss": 1.1376, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6575358779220295e-05, |
|
"loss": 1.0043, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.6563598307209207e-05, |
|
"loss": 1.0649, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6551821864543443e-05, |
|
"loss": 1.0059, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.654002947987764e-05, |
|
"loss": 1.0802, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6528221181905217e-05, |
|
"loss": 1.0779, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6516396999358323e-05, |
|
"loss": 1.0919, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6504556961007747e-05, |
|
"loss": 1.0197, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6492701095662866e-05, |
|
"loss": 0.9916, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6480829432171567e-05, |
|
"loss": 1.0524, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.646894199942017e-05, |
|
"loss": 0.9794, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.645703882633338e-05, |
|
"loss": 0.9522, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.6445119941874184e-05, |
|
"loss": 1.0006, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.643318537504381e-05, |
|
"loss": 1.1215, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.642123515488164e-05, |
|
"loss": 1.0781, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6409269310465145e-05, |
|
"loss": 1.04, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6397287870909814e-05, |
|
"loss": 1.0278, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.638529086536908e-05, |
|
"loss": 1.0284, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6373278323034254e-05, |
|
"loss": 1.0425, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.636125027313445e-05, |
|
"loss": 1.0831, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6349206744936518e-05, |
|
"loss": 1.2046, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.6337147767744966e-05, |
|
"loss": 1.1424, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.632507337090189e-05, |
|
"loss": 1.0118, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.631298358378692e-05, |
|
"loss": 0.9081, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6300878435817115e-05, |
|
"loss": 1.1369, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.628875795644692e-05, |
|
"loss": 0.9326, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6276622175168083e-05, |
|
"loss": 1.0656, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6264471121509587e-05, |
|
"loss": 1.0247, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6252304825037576e-05, |
|
"loss": 1.0212, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.624012331535528e-05, |
|
"loss": 1.0461, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6227926622102947e-05, |
|
"loss": 1.1057, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.6215714774957775e-05, |
|
"loss": 1.1477, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.620348780363382e-05, |
|
"loss": 1.0714, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6191245737881956e-05, |
|
"loss": 1.0335, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.617898860748978e-05, |
|
"loss": 1.1108, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.616671644228153e-05, |
|
"loss": 1.0287, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.615442927211805e-05, |
|
"loss": 0.9708, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6142127126896682e-05, |
|
"loss": 1.037, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.61298100365512e-05, |
|
"loss": 1.108, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6117478031051756e-05, |
|
"loss": 1.0569, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.610513114040479e-05, |
|
"loss": 1.0704, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.6092769394652946e-05, |
|
"loss": 1.0724, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.608039282387504e-05, |
|
"loss": 0.9622, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6068001458185934e-05, |
|
"loss": 1.0873, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.605559532773651e-05, |
|
"loss": 1.1178, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6043174462713565e-05, |
|
"loss": 1.0736, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6030738893339753e-05, |
|
"loss": 0.9509, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.6018288649873495e-05, |
|
"loss": 1.063, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.600582376260894e-05, |
|
"loss": 1.0378, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.5993344261875847e-05, |
|
"loss": 0.9509, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.5980850178039547e-05, |
|
"loss": 1.1301, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.596834154150084e-05, |
|
"loss": 1.0636, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.5955818382695953e-05, |
|
"loss": 1.0508, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5943280732096437e-05, |
|
"loss": 0.9618, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.593072862020911e-05, |
|
"loss": 0.9606, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5918162077575976e-05, |
|
"loss": 1.1394, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5905581134774154e-05, |
|
"loss": 1.0863, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.589298582241579e-05, |
|
"loss": 1.1145, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5880376171148014e-05, |
|
"loss": 1.0602, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.586775221165283e-05, |
|
"loss": 0.9911, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.585511397464707e-05, |
|
"loss": 1.1537, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5842461490882288e-05, |
|
"loss": 1.0622, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.5829794791144723e-05, |
|
"loss": 1.1412, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.581711390625519e-05, |
|
"loss": 0.9844, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.580441886706903e-05, |
|
"loss": 0.9911, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5791709704476016e-05, |
|
"loss": 0.9844, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5778986449400294e-05, |
|
"loss": 1.092, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5766249132800294e-05, |
|
"loss": 0.9114, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5753497785668662e-05, |
|
"loss": 1.0616, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5740732439032187e-05, |
|
"loss": 1.0112, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.572795312395172e-05, |
|
"loss": 1.0562, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.571515987152209e-05, |
|
"loss": 0.9613, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.5702352712872055e-05, |
|
"loss": 1.0254, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5689531679164205e-05, |
|
"loss": 1.0329, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5676696801594886e-05, |
|
"loss": 1.0865, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.566384811139413e-05, |
|
"loss": 1.0754, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5650985639825583e-05, |
|
"loss": 1.0884, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5638109418186424e-05, |
|
"loss": 1.1105, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.562521947780728e-05, |
|
"loss": 0.9587, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5612315850052166e-05, |
|
"loss": 1.0646, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5599398566318398e-05, |
|
"loss": 0.9582, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5586467658036526e-05, |
|
"loss": 0.9924, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5573523156670245e-05, |
|
"loss": 1.1182, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5560565093716327e-05, |
|
"loss": 1.0079, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5547593500704547e-05, |
|
"loss": 1.1699, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5534608409197592e-05, |
|
"loss": 1.0251, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5521609850791004e-05, |
|
"loss": 1.0481, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5508597857113077e-05, |
|
"loss": 1.0609, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.549557245982482e-05, |
|
"loss": 1.0378, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5482533690619836e-05, |
|
"loss": 0.9945, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5469481581224274e-05, |
|
"loss": 1.0167, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5456416163396732e-05, |
|
"loss": 0.9759, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5443337468928207e-05, |
|
"loss": 1.1111, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5430245529641987e-05, |
|
"loss": 1.0053, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5417140377393596e-05, |
|
"loss": 1.0869, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5404022044070705e-05, |
|
"loss": 1.0718, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5390890561593054e-05, |
|
"loss": 1.0262, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.537774596191238e-05, |
|
"loss": 1.0595, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5364588277012343e-05, |
|
"loss": 0.9668, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5351417538908436e-05, |
|
"loss": 1.161, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.533823377964791e-05, |
|
"loss": 1.1035, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5325037031309703e-05, |
|
"loss": 1.046, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.5311827326004362e-05, |
|
"loss": 1.0651, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.529860469587396e-05, |
|
"loss": 1.0803, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5285369173092016e-05, |
|
"loss": 1.1507, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5272120789863414e-05, |
|
"loss": 1.0646, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.525885957842434e-05, |
|
"loss": 1.1271, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5245585571042194e-05, |
|
"loss": 1.0881, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5232298800015506e-05, |
|
"loss": 1.0093, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5218999297673863e-05, |
|
"loss": 1.1195, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.520568709637783e-05, |
|
"loss": 1.0867, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5192362228518876e-05, |
|
"loss": 1.0524, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.5179024726519285e-05, |
|
"loss": 1.1189, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5165674622832084e-05, |
|
"loss": 1.005, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.515231194994097e-05, |
|
"loss": 1.0414, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5138936740360208e-05, |
|
"loss": 1.0023, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5125549026634585e-05, |
|
"loss": 1.011, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5112148841339295e-05, |
|
"loss": 0.9053, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.50987362170799e-05, |
|
"loss": 1.0596, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5085311186492206e-05, |
|
"loss": 1.1282, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5071873782242223e-05, |
|
"loss": 1.109, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.505842403702606e-05, |
|
"loss": 1.085, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5044961983569856e-05, |
|
"loss": 1.0244, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.5031487654629704e-05, |
|
"loss": 1.1299, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.5018001082991553e-05, |
|
"loss": 0.9897, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.500450230147116e-05, |
|
"loss": 0.998, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4990991342913973e-05, |
|
"loss": 1.035, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4977468240195085e-05, |
|
"loss": 1.0583, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4963933026219122e-05, |
|
"loss": 0.9951, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.495038573392019e-05, |
|
"loss": 1.0424, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4936826396261783e-05, |
|
"loss": 1.0204, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4923255046236705e-05, |
|
"loss": 0.9811, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4909671716866985e-05, |
|
"loss": 1.0521, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.4896076441203801e-05, |
|
"loss": 0.9512, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.48824692523274e-05, |
|
"loss": 1.1036, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.486885018334702e-05, |
|
"loss": 1.0224, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4855219267400798e-05, |
|
"loss": 1.0679, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4841576537655705e-05, |
|
"loss": 1.0703, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.482792202730745e-05, |
|
"loss": 1.0549, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4814255769580414e-05, |
|
"loss": 1.0344, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4800577797727558e-05, |
|
"loss": 1.0001, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4786888145030343e-05, |
|
"loss": 0.9752, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4773186844798662e-05, |
|
"loss": 1.1156, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.4759473930370738e-05, |
|
"loss": 1.0112, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4745749435113059e-05, |
|
"loss": 1.0115, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4732013392420293e-05, |
|
"loss": 1.0137, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4718265835715202e-05, |
|
"loss": 1.0115, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4704506798448568e-05, |
|
"loss": 0.9592, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.46907363140991e-05, |
|
"loss": 1.0084, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4676954416173375e-05, |
|
"loss": 1.1322, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4663161138205723e-05, |
|
"loss": 0.9445, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4649356513758177e-05, |
|
"loss": 1.105, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4635540576420375e-05, |
|
"loss": 1.0905, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.4621713359809479e-05, |
|
"loss": 1.0894, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4607874897570104e-05, |
|
"loss": 1.1155, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.459402522337422e-05, |
|
"loss": 1.0367, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4580164370921079e-05, |
|
"loss": 0.9746, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4566292373937133e-05, |
|
"loss": 0.9708, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4552409266175953e-05, |
|
"loss": 1.1791, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4538515081418143e-05, |
|
"loss": 0.9775, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4524609853471266e-05, |
|
"loss": 1.01, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4510693616169742e-05, |
|
"loss": 1.0487, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4496766403374791e-05, |
|
"loss": 0.9583, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4482828248974335e-05, |
|
"loss": 1.1337, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4468879186882916e-05, |
|
"loss": 1.0066, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4454919251041624e-05, |
|
"loss": 1.0504, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4440948475418001e-05, |
|
"loss": 0.9998, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4426966894005967e-05, |
|
"loss": 1.0397, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4412974540825732e-05, |
|
"loss": 0.979, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4398971449923721e-05, |
|
"loss": 1.0442, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4384957655372484e-05, |
|
"loss": 1.0746, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4370933191270617e-05, |
|
"loss": 1.0306, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.435689809174267e-05, |
|
"loss": 1.0168, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.434285239093908e-05, |
|
"loss": 0.9451, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4328796123036072e-05, |
|
"loss": 1.0496, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.431472932223559e-05, |
|
"loss": 1.059, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4300652022765207e-05, |
|
"loss": 1.1128, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4286564258878032e-05, |
|
"loss": 1.0558, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4272466064852644e-05, |
|
"loss": 1.054, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4258357474992998e-05, |
|
"loss": 1.0098, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.424423852362835e-05, |
|
"loss": 1.09, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.423010924511316e-05, |
|
"loss": 1.0351, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.421596967382702e-05, |
|
"loss": 0.9583, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4201819844174566e-05, |
|
"loss": 0.985, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.418765979058539e-05, |
|
"loss": 1.014, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4173489547513975e-05, |
|
"loss": 1.0463, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4159309149439585e-05, |
|
"loss": 1.0555, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4145118630866187e-05, |
|
"loss": 1.1, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.41309180263224e-05, |
|
"loss": 1.0573, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.411670737036135e-05, |
|
"loss": 0.9699, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4102486697560649e-05, |
|
"loss": 0.9899, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4088256042522263e-05, |
|
"loss": 1.0142, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.407401543987246e-05, |
|
"loss": 1.0445, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.4059764924261705e-05, |
|
"loss": 1.1021, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4045504530364585e-05, |
|
"loss": 0.9518, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4031234292879726e-05, |
|
"loss": 0.9748, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4016954246529697e-05, |
|
"loss": 1.037, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.4002664426060944e-05, |
|
"loss": 1.1053, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3988364866243693e-05, |
|
"loss": 0.942, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3974055601871867e-05, |
|
"loss": 0.9726, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3959736667762998e-05, |
|
"loss": 1.0883, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3945408098758155e-05, |
|
"loss": 1.1857, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3931069929721841e-05, |
|
"loss": 1.0125, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3916722195541927e-05, |
|
"loss": 0.9293, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3902364931129557e-05, |
|
"loss": 1.0856, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.388799817141906e-05, |
|
"loss": 0.9689, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3873621951367861e-05, |
|
"loss": 1.002, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3859236305956425e-05, |
|
"loss": 1.0504, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3844841270188134e-05, |
|
"loss": 1.1238, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3830436879089228e-05, |
|
"loss": 1.1083, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3816023167708706e-05, |
|
"loss": 1.0482, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3801600171118243e-05, |
|
"loss": 0.9638, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3787167924412113e-05, |
|
"loss": 1.0552, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.3772726462707091e-05, |
|
"loss": 0.9825, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3758275821142382e-05, |
|
"loss": 0.9823, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3743816034879522e-05, |
|
"loss": 0.9553, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.37293471391023e-05, |
|
"loss": 0.9501, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3714869169016666e-05, |
|
"loss": 1.0232, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3700382159850656e-05, |
|
"loss": 0.9845, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3685886146854297e-05, |
|
"loss": 1.2048, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3671381165299524e-05, |
|
"loss": 1.1017, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.36568672504801e-05, |
|
"loss": 1.0742, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3642344437711513e-05, |
|
"loss": 1.0223, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.3627812762330913e-05, |
|
"loss": 1.0361, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3613272259697007e-05, |
|
"loss": 1.0305, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3598722965189985e-05, |
|
"loss": 0.9186, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3584164914211428e-05, |
|
"loss": 1.0692, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3569598142184225e-05, |
|
"loss": 1.0484, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3555022684552484e-05, |
|
"loss": 0.9853, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3540438576781441e-05, |
|
"loss": 0.9063, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3525845854357392e-05, |
|
"loss": 0.9795, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3511244552787583e-05, |
|
"loss": 0.993, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.3496634707600147e-05, |
|
"loss": 1.06, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.348201635434399e-05, |
|
"loss": 1.0209, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3467389528588733e-05, |
|
"loss": 0.9821, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3452754265924601e-05, |
|
"loss": 0.9394, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3438110601962362e-05, |
|
"loss": 1.0648, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3423458572333215e-05, |
|
"loss": 1.0621, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3408798212688718e-05, |
|
"loss": 1.1157, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.33941295587007e-05, |
|
"loss": 0.9739, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3379452646061163e-05, |
|
"loss": 0.9967, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.336476751048222e-05, |
|
"loss": 0.9719, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3350074187695979e-05, |
|
"loss": 0.9223, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.3335372713454469e-05, |
|
"loss": 1.0656, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3320663123529562e-05, |
|
"loss": 0.9943, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.330594545371287e-05, |
|
"loss": 1.0338, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.329121973981567e-05, |
|
"loss": 0.9358, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3276486017668808e-05, |
|
"loss": 1.031, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3261744323122621e-05, |
|
"loss": 0.9805, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3246994692046837e-05, |
|
"loss": 1.0385, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3232237160330499e-05, |
|
"loss": 1.0981, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.321747176388188e-05, |
|
"loss": 0.9949, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3202698538628376e-05, |
|
"loss": 1.0629, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3187917520516448e-05, |
|
"loss": 1.0223, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.3173128745511508e-05, |
|
"loss": 1.0499, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3158332249597843e-05, |
|
"loss": 1.027, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3143528068778527e-05, |
|
"loss": 1.0735, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3128716239075338e-05, |
|
"loss": 1.0389, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3113896796528664e-05, |
|
"loss": 1.0096, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3099069777197413e-05, |
|
"loss": 0.9816, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3084235217158929e-05, |
|
"loss": 0.9918, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3069393152508907e-05, |
|
"loss": 1.0969, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3054543619361302e-05, |
|
"loss": 0.9449, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.303968665384824e-05, |
|
"loss": 0.961, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.3024822292119933e-05, |
|
"loss": 0.995, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.3009950570344589e-05, |
|
"loss": 1.0909, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2995071524708324e-05, |
|
"loss": 1.0801, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2980185191415074e-05, |
|
"loss": 0.8789, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.296529160668651e-05, |
|
"loss": 1.0743, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2950390806761943e-05, |
|
"loss": 0.9788, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2935482827898249e-05, |
|
"loss": 1.0473, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.292056770636976e-05, |
|
"loss": 1.041, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2905645478468194e-05, |
|
"loss": 1.1584, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2890716180502566e-05, |
|
"loss": 1.0537, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2875779848799078e-05, |
|
"loss": 1.064, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2860836519701064e-05, |
|
"loss": 1.0758, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2845886229568873e-05, |
|
"loss": 0.9112, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2830929014779795e-05, |
|
"loss": 1.0018, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2815964911727972e-05, |
|
"loss": 0.9525, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2800993956824303e-05, |
|
"loss": 1.0736, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.278601618649636e-05, |
|
"loss": 0.9452, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.27710316371883e-05, |
|
"loss": 0.9938, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2756040345360771e-05, |
|
"loss": 1.0259, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.274104234749083e-05, |
|
"loss": 0.988, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2726037680071851e-05, |
|
"loss": 1.0983, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2711026379613437e-05, |
|
"loss": 1.0379, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2696008482641324e-05, |
|
"loss": 1.0761, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2680984025697313e-05, |
|
"loss": 1.1011, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2665953045339151e-05, |
|
"loss": 1.0651, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.265091557814047e-05, |
|
"loss": 1.0795, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2635871660690677e-05, |
|
"loss": 0.9955, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2620821329594879e-05, |
|
"loss": 0.9747, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2605764621473793e-05, |
|
"loss": 0.9265, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2590701572963642e-05, |
|
"loss": 1.0082, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2575632220716079e-05, |
|
"loss": 1.0273, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2560556601398101e-05, |
|
"loss": 1.0197, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2545474751691953e-05, |
|
"loss": 0.924, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2530386708295037e-05, |
|
"loss": 0.9471, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2515292507919829e-05, |
|
"loss": 1.0104, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.250019218729378e-05, |
|
"loss": 0.9895, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2485085783159239e-05, |
|
"loss": 1.0076, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2469973332273355e-05, |
|
"loss": 0.9243, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2454854871407993e-05, |
|
"loss": 0.9458, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2439730437349636e-05, |
|
"loss": 1.0546, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2424600066899304e-05, |
|
"loss": 1.0387, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2409463796872463e-05, |
|
"loss": 0.9942, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2394321664098932e-05, |
|
"loss": 1.0612, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2379173705422795e-05, |
|
"loss": 0.9398, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2364019957702316e-05, |
|
"loss": 0.9349, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.234886045780984e-05, |
|
"loss": 1.0658, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2333695242631705e-05, |
|
"loss": 1.0601, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2318524349068171e-05, |
|
"loss": 1.0399, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2303347814033292e-05, |
|
"loss": 1.0008, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2288165674454871e-05, |
|
"loss": 1.0375, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.227297796727433e-05, |
|
"loss": 1.036, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2257784729446655e-05, |
|
"loss": 1.0603, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2242585997940275e-05, |
|
"loss": 1.0761, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2227381809736991e-05, |
|
"loss": 1.0275, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2212172201831885e-05, |
|
"loss": 1.0424, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2196957211233223e-05, |
|
"loss": 0.9656, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.218173687496237e-05, |
|
"loss": 1.0192, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2166511230053697e-05, |
|
"loss": 0.9529, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2151280313554486e-05, |
|
"loss": 1.0015, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2136044162524857e-05, |
|
"loss": 0.98, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.2120802814037664e-05, |
|
"loss": 1.0416, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2105556305178398e-05, |
|
"loss": 0.9874, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2090304673045124e-05, |
|
"loss": 1.0901, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2075047954748354e-05, |
|
"loss": 1.0148, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2059786187410984e-05, |
|
"loss": 0.9968, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2044519408168201e-05, |
|
"loss": 1.0267, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2029247654167379e-05, |
|
"loss": 0.9508, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.2013970962568003e-05, |
|
"loss": 1.0579, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1998689370541563e-05, |
|
"loss": 1.0135, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1983402915271478e-05, |
|
"loss": 0.9435, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1968111633953009e-05, |
|
"loss": 1.0969, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.195281556379314e-05, |
|
"loss": 1.1063, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.193751474201053e-05, |
|
"loss": 0.8378, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1922209205835381e-05, |
|
"loss": 1.0479, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.190689899250938e-05, |
|
"loss": 0.8978, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.189158413928558e-05, |
|
"loss": 0.9973, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1876264683428345e-05, |
|
"loss": 0.9548, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1860940662213212e-05, |
|
"loss": 0.985, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1845612112926843e-05, |
|
"loss": 1.0125, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.183027907286692e-05, |
|
"loss": 0.9479, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1814941579342045e-05, |
|
"loss": 1.1257, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1799599669671655e-05, |
|
"loss": 0.9444, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1784253381185937e-05, |
|
"loss": 0.9679, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.176890275122573e-05, |
|
"loss": 1.1349, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.175354781714244e-05, |
|
"loss": 1.0984, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1738188616297941e-05, |
|
"loss": 1.0563, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1722825186064494e-05, |
|
"loss": 0.996, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1707457563824647e-05, |
|
"loss": 1.023, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.169208578697115e-05, |
|
"loss": 0.9333, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1676709892906858e-05, |
|
"loss": 0.954, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1661329919044657e-05, |
|
"loss": 0.925, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.164594590280734e-05, |
|
"loss": 1.057, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1630557881627554e-05, |
|
"loss": 1.1391, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1615165892947679e-05, |
|
"loss": 0.9459, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1599769974219756e-05, |
|
"loss": 0.9985, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.158437016290539e-05, |
|
"loss": 1.0195, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1568966496475649e-05, |
|
"loss": 0.9744, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1553559012410984e-05, |
|
"loss": 1.0585, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1538147748201139e-05, |
|
"loss": 1.0978, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1522732741345053e-05, |
|
"loss": 1.0574, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1507314029350775e-05, |
|
"loss": 1.096, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1491891649735367e-05, |
|
"loss": 0.9316, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1476465640024814e-05, |
|
"loss": 1.0443, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1461036037753935e-05, |
|
"loss": 0.9713, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.144560288046629e-05, |
|
"loss": 1.0231, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1430166205714089e-05, |
|
"loss": 1.0991, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1414726051058103e-05, |
|
"loss": 1.0327, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.139928245406757e-05, |
|
"loss": 0.9886, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1383835452320098e-05, |
|
"loss": 0.9413, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1368385083401585e-05, |
|
"loss": 0.9911, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1352931384906126e-05, |
|
"loss": 0.9354, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1337474394435908e-05, |
|
"loss": 1.034, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1322014149601137e-05, |
|
"loss": 0.9278, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1306550688019926e-05, |
|
"loss": 1.0181, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.129108404731823e-05, |
|
"loss": 1.0501, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1275614265129731e-05, |
|
"loss": 0.961, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.126014137909575e-05, |
|
"loss": 1.0528, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1244665426865173e-05, |
|
"loss": 1.1379, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1229186446094338e-05, |
|
"loss": 0.8832, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1213704474446952e-05, |
|
"loss": 0.998, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1198219549594e-05, |
|
"loss": 1.0951, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1182731709213658e-05, |
|
"loss": 0.9905, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1167240990991192e-05, |
|
"loss": 0.9449, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1151747432618871e-05, |
|
"loss": 1.0222, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1136251071795871e-05, |
|
"loss": 0.9773, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1120751946228196e-05, |
|
"loss": 0.9687, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1105250093628566e-05, |
|
"loss": 0.9521, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1089745551716345e-05, |
|
"loss": 1.0412, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1074238358217437e-05, |
|
"loss": 1.0068, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1058728550864197e-05, |
|
"loss": 0.9283, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1043216167395345e-05, |
|
"loss": 1.0227, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1027701245555867e-05, |
|
"loss": 1.0745, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.1012183823096919e-05, |
|
"loss": 0.9773, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0996663937775751e-05, |
|
"loss": 0.9932, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.09811416273556e-05, |
|
"loss": 1.0579, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.096561692960561e-05, |
|
"loss": 0.9958, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0950089882300721e-05, |
|
"loss": 1.0588, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0934560523221602e-05, |
|
"loss": 1.0609, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0919028890154544e-05, |
|
"loss": 0.9774, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0903495020891374e-05, |
|
"loss": 0.9962, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0887958953229349e-05, |
|
"loss": 0.9606, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0872420724971089e-05, |
|
"loss": 0.8738, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0856880373924461e-05, |
|
"loss": 1.0137, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0841337937902501e-05, |
|
"loss": 1.0548, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0825793454723325e-05, |
|
"loss": 0.9973, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0810246962210019e-05, |
|
"loss": 1.0262, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0794698498190557e-05, |
|
"loss": 1.0173, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0779148100497723e-05, |
|
"loss": 0.9807, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0763595806968996e-05, |
|
"loss": 1.0609, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0748041655446473e-05, |
|
"loss": 1.0371, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0732485683776767e-05, |
|
"loss": 1.0745, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0716927929810925e-05, |
|
"loss": 1.0435, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0701368431404327e-05, |
|
"loss": 0.9273, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0685807226416599e-05, |
|
"loss": 0.99, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0670244352711518e-05, |
|
"loss": 0.9298, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0654679848156925e-05, |
|
"loss": 1.0696, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0639113750624626e-05, |
|
"loss": 0.9104, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0623546097990302e-05, |
|
"loss": 0.9689, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0607976928133423e-05, |
|
"loss": 1.0285, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0592406278937143e-05, |
|
"loss": 1.0515, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0576834188288226e-05, |
|
"loss": 1.0184, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0561260694076936e-05, |
|
"loss": 1.0958, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0545685834196948e-05, |
|
"loss": 0.9664, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0530109646545272e-05, |
|
"loss": 1.017, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.051453216902214e-05, |
|
"loss": 0.866, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0498953439530926e-05, |
|
"loss": 1.0004, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0483373495978046e-05, |
|
"loss": 1.0156, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0467792376272879e-05, |
|
"loss": 0.961, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0452210118327652e-05, |
|
"loss": 0.9923, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0436626760057379e-05, |
|
"loss": 1.0013, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0421042339379733e-05, |
|
"loss": 0.9669, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0405456894214987e-05, |
|
"loss": 1.0208, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0389870462485903e-05, |
|
"loss": 0.9873, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0374283082117635e-05, |
|
"loss": 0.9581, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0358694791037654e-05, |
|
"loss": 0.9205, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0343105627175645e-05, |
|
"loss": 1.031, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0327515628463415e-05, |
|
"loss": 1.1879, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0311924832834807e-05, |
|
"loss": 0.9777, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0296333278225599e-05, |
|
"loss": 1.0311, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0280741002573413e-05, |
|
"loss": 1.0438, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0265148043817632e-05, |
|
"loss": 0.937, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0249554439899299e-05, |
|
"loss": 0.9321, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0233960228761022e-05, |
|
"loss": 0.9695, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0218365448346893e-05, |
|
"loss": 1.0823, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0202770136602389e-05, |
|
"loss": 0.9352, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0187174331474272e-05, |
|
"loss": 0.9465, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0171578070910513e-05, |
|
"loss": 1.0222, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0155981392860187e-05, |
|
"loss": 0.9526, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0140384335273386e-05, |
|
"loss": 1.0049, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0124786936101128e-05, |
|
"loss": 1.0112, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0109189233295255e-05, |
|
"loss": 0.9654, |
|
"step": 2002 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0093591264808357e-05, |
|
"loss": 1.1046, |
|
"step": 2004 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0077993068593663e-05, |
|
"loss": 1.0173, |
|
"step": 2006 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0062394682604964e-05, |
|
"loss": 0.9919, |
|
"step": 2008 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0046796144796499e-05, |
|
"loss": 0.9841, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.003119749312289e-05, |
|
"loss": 0.9512, |
|
"step": 2012 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0015598765539032e-05, |
|
"loss": 1.1299, |
|
"step": 2014 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1e-05, |
|
"loss": 0.7274, |
|
"step": 2016 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.984401234460971e-06, |
|
"loss": 0.6941, |
|
"step": 2018 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.968802506877112e-06, |
|
"loss": 0.6611, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.953203855203505e-06, |
|
"loss": 0.6265, |
|
"step": 2022 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.937605317395041e-06, |
|
"loss": 0.7557, |
|
"step": 2024 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.922006931406337e-06, |
|
"loss": 0.6676, |
|
"step": 2026 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.906408735191644e-06, |
|
"loss": 0.7461, |
|
"step": 2028 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.890810766704745e-06, |
|
"loss": 0.7283, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.875213063898875e-06, |
|
"loss": 0.6275, |
|
"step": 2032 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.859615664726616e-06, |
|
"loss": 0.7321, |
|
"step": 2034 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.844018607139818e-06, |
|
"loss": 0.6703, |
|
"step": 2036 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.828421929089494e-06, |
|
"loss": 0.7174, |
|
"step": 2038 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.812825668525733e-06, |
|
"loss": 0.7463, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.797229863397616e-06, |
|
"loss": 0.645, |
|
"step": 2042 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 9.781634551653108e-06, |
|
"loss": 0.6774, |
|
"step": 2044 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.766039771238982e-06, |
|
"loss": 0.6713, |
|
"step": 2046 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.750445560100705e-06, |
|
"loss": 0.7031, |
|
"step": 2048 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.73485195618237e-06, |
|
"loss": 0.6404, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.719258997426588e-06, |
|
"loss": 0.6305, |
|
"step": 2052 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.703666721774403e-06, |
|
"loss": 0.7386, |
|
"step": 2054 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.688075167165196e-06, |
|
"loss": 0.6172, |
|
"step": 2056 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.672484371536586e-06, |
|
"loss": 0.6311, |
|
"step": 2058 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.656894372824358e-06, |
|
"loss": 0.675, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.64130520896235e-06, |
|
"loss": 0.6526, |
|
"step": 2062 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 9.625716917882368e-06, |
|
"loss": 0.5974, |
|
"step": 2064 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.610129537514102e-06, |
|
"loss": 0.5904, |
|
"step": 2066 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.594543105785013e-06, |
|
"loss": 0.772, |
|
"step": 2068 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.578957660620267e-06, |
|
"loss": 0.663, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.563373239942623e-06, |
|
"loss": 0.6589, |
|
"step": 2072 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.547789881672348e-06, |
|
"loss": 0.7155, |
|
"step": 2074 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.532207623727126e-06, |
|
"loss": 0.6798, |
|
"step": 2076 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.516626504021957e-06, |
|
"loss": 0.6863, |
|
"step": 2078 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.501046560469079e-06, |
|
"loss": 0.6884, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.485467830977864e-06, |
|
"loss": 0.6154, |
|
"step": 2082 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.469890353454731e-06, |
|
"loss": 0.6493, |
|
"step": 2084 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.454314165803054e-06, |
|
"loss": 0.656, |
|
"step": 2086 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.438739305923067e-06, |
|
"loss": 0.6525, |
|
"step": 2088 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.423165811711777e-06, |
|
"loss": 0.6226, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.407593721062858e-06, |
|
"loss": 0.6402, |
|
"step": 2092 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.39202307186658e-06, |
|
"loss": 0.6573, |
|
"step": 2094 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.376453902009701e-06, |
|
"loss": 0.626, |
|
"step": 2096 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.360886249375375e-06, |
|
"loss": 0.6027, |
|
"step": 2098 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.345320151843078e-06, |
|
"loss": 0.6264, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.329755647288484e-06, |
|
"loss": 0.6712, |
|
"step": 2102 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.314192773583403e-06, |
|
"loss": 0.6839, |
|
"step": 2104 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.298631568595675e-06, |
|
"loss": 0.631, |
|
"step": 2106 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.283072070189074e-06, |
|
"loss": 0.6935, |
|
"step": 2108 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.267514316223234e-06, |
|
"loss": 0.6987, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.251958344553529e-06, |
|
"loss": 0.5988, |
|
"step": 2112 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.23640419303101e-06, |
|
"loss": 0.7042, |
|
"step": 2114 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.220851899502284e-06, |
|
"loss": 0.6016, |
|
"step": 2116 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.205301501809448e-06, |
|
"loss": 0.6374, |
|
"step": 2118 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.189753037789988e-06, |
|
"loss": 0.6465, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.174206545276678e-06, |
|
"loss": 0.6615, |
|
"step": 2122 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.1586620620975e-06, |
|
"loss": 0.5189, |
|
"step": 2124 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.143119626075542e-06, |
|
"loss": 0.6731, |
|
"step": 2126 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.127579275028914e-06, |
|
"loss": 0.806, |
|
"step": 2128 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.112041046770653e-06, |
|
"loss": 0.6313, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.096504979108629e-06, |
|
"loss": 0.6204, |
|
"step": 2132 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.080971109845458e-06, |
|
"loss": 0.7086, |
|
"step": 2134 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.0654394767784e-06, |
|
"loss": 0.7032, |
|
"step": 2136 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.049910117699282e-06, |
|
"loss": 0.662, |
|
"step": 2138 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.034383070394394e-06, |
|
"loss": 0.6562, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.0188583726444e-06, |
|
"loss": 0.6316, |
|
"step": 2142 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.00333606222425e-06, |
|
"loss": 0.6871, |
|
"step": 2144 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 8.987816176903083e-06, |
|
"loss": 0.6765, |
|
"step": 2146 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.972298754444135e-06, |
|
"loss": 0.6408, |
|
"step": 2148 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.956783832604655e-06, |
|
"loss": 0.6632, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.941271449135806e-06, |
|
"loss": 0.6747, |
|
"step": 2152 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.925761641782568e-06, |
|
"loss": 0.7084, |
|
"step": 2154 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.91025444828366e-06, |
|
"loss": 0.6591, |
|
"step": 2156 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.89474990637144e-06, |
|
"loss": 0.7025, |
|
"step": 2158 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.879248053771809e-06, |
|
"loss": 0.6193, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.86374892820413e-06, |
|
"loss": 0.6767, |
|
"step": 2162 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.848252567381132e-06, |
|
"loss": 0.698, |
|
"step": 2164 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 8.832759009008811e-06, |
|
"loss": 0.7042, |
|
"step": 2166 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.817268290786343e-06, |
|
"loss": 0.6342, |
|
"step": 2168 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.801780450406002e-06, |
|
"loss": 0.6638, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.786295525553053e-06, |
|
"loss": 0.6281, |
|
"step": 2172 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.770813553905666e-06, |
|
"loss": 0.7123, |
|
"step": 2174 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.755334573134829e-06, |
|
"loss": 0.7082, |
|
"step": 2176 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.739858620904252e-06, |
|
"loss": 0.6898, |
|
"step": 2178 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.724385734870272e-06, |
|
"loss": 0.6796, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.708915952681771e-06, |
|
"loss": 0.7019, |
|
"step": 2182 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.693449311980074e-06, |
|
"loss": 0.7283, |
|
"step": 2184 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.677985850398866e-06, |
|
"loss": 0.7167, |
|
"step": 2186 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.662525605564094e-06, |
|
"loss": 0.7631, |
|
"step": 2188 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.647068615093876e-06, |
|
"loss": 0.7207, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.631614916598419e-06, |
|
"loss": 0.5781, |
|
"step": 2192 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.616164547679907e-06, |
|
"loss": 0.7202, |
|
"step": 2194 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.600717545932435e-06, |
|
"loss": 0.7038, |
|
"step": 2196 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.5852739489419e-06, |
|
"loss": 0.6923, |
|
"step": 2198 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.569833794285916e-06, |
|
"loss": 0.6168, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.554397119533715e-06, |
|
"loss": 0.5952, |
|
"step": 2202 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.53896396224607e-06, |
|
"loss": 0.7057, |
|
"step": 2204 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 8.52353435997519e-06, |
|
"loss": 0.663, |
|
"step": 2206 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.508108350264635e-06, |
|
"loss": 0.6468, |
|
"step": 2208 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.492685970649228e-06, |
|
"loss": 0.6709, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.47726725865495e-06, |
|
"loss": 0.6867, |
|
"step": 2212 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.461852251798865e-06, |
|
"loss": 0.7253, |
|
"step": 2214 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.446440987589019e-06, |
|
"loss": 0.674, |
|
"step": 2216 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.431033503524354e-06, |
|
"loss": 0.6776, |
|
"step": 2218 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.415629837094612e-06, |
|
"loss": 0.6784, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.400230025780242e-06, |
|
"loss": 0.6327, |
|
"step": 2222 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.384834107052321e-06, |
|
"loss": 0.7206, |
|
"step": 2224 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 8.369442118372448e-06, |
|
"loss": 0.7468, |
|
"step": 2226 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.35405409719266e-06, |
|
"loss": 0.6357, |
|
"step": 2228 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.338670080955348e-06, |
|
"loss": 0.6712, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.323290107093143e-06, |
|
"loss": 0.6257, |
|
"step": 2232 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.307914213028856e-06, |
|
"loss": 0.6354, |
|
"step": 2234 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.292542436175358e-06, |
|
"loss": 0.6703, |
|
"step": 2236 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.277174813935509e-06, |
|
"loss": 0.663, |
|
"step": 2238 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.26181138370206e-06, |
|
"loss": 0.7074, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.246452182857561e-06, |
|
"loss": 0.6303, |
|
"step": 2242 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.231097248774273e-06, |
|
"loss": 0.6227, |
|
"step": 2244 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.215746618814066e-06, |
|
"loss": 0.6354, |
|
"step": 2246 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.200400330328347e-06, |
|
"loss": 0.7508, |
|
"step": 2248 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.185058420657957e-06, |
|
"loss": 0.6727, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.16972092713308e-06, |
|
"loss": 0.6373, |
|
"step": 2252 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.154387887073159e-06, |
|
"loss": 0.7155, |
|
"step": 2254 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.139059337786793e-06, |
|
"loss": 0.6555, |
|
"step": 2256 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.12373531657166e-06, |
|
"loss": 0.5803, |
|
"step": 2258 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.108415860714418e-06, |
|
"loss": 0.7269, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.09310100749062e-06, |
|
"loss": 0.6762, |
|
"step": 2262 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.07779079416462e-06, |
|
"loss": 0.6646, |
|
"step": 2264 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 8.062485257989471e-06, |
|
"loss": 0.7103, |
|
"step": 2266 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.047184436206865e-06, |
|
"loss": 0.6179, |
|
"step": 2268 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.031888366046998e-06, |
|
"loss": 0.6409, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.016597084728525e-06, |
|
"loss": 0.6268, |
|
"step": 2272 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.001310629458443e-06, |
|
"loss": 0.6717, |
|
"step": 2274 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.986029037432e-06, |
|
"loss": 0.6976, |
|
"step": 2276 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.970752345832624e-06, |
|
"loss": 0.6657, |
|
"step": 2278 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.9554805918318e-06, |
|
"loss": 0.6034, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.940213812589018e-06, |
|
"loss": 0.6882, |
|
"step": 2282 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.924952045251651e-06, |
|
"loss": 0.6342, |
|
"step": 2284 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 7.909695326954878e-06, |
|
"loss": 0.6281, |
|
"step": 2286 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.894443694821604e-06, |
|
"loss": 0.6404, |
|
"step": 2288 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.879197185962339e-06, |
|
"loss": 0.6916, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.863955837475144e-06, |
|
"loss": 0.6599, |
|
"step": 2292 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.848719686445516e-06, |
|
"loss": 0.6776, |
|
"step": 2294 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.833488769946308e-06, |
|
"loss": 0.6596, |
|
"step": 2296 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.818263125037633e-06, |
|
"loss": 0.6694, |
|
"step": 2298 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.803042788766779e-06, |
|
"loss": 0.7016, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.787827798168115e-06, |
|
"loss": 0.6071, |
|
"step": 2302 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.772618190263009e-06, |
|
"loss": 0.5724, |
|
"step": 2304 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 7.757414002059726e-06, |
|
"loss": 0.6416, |
|
"step": 2306 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.74221527055335e-06, |
|
"loss": 0.6277, |
|
"step": 2308 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.727022032725671e-06, |
|
"loss": 0.6148, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.711834325545135e-06, |
|
"loss": 0.6552, |
|
"step": 2312 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.696652185966713e-06, |
|
"loss": 0.6757, |
|
"step": 2314 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.681475650931834e-06, |
|
"loss": 0.6434, |
|
"step": 2316 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.666304757368297e-06, |
|
"loss": 0.7195, |
|
"step": 2318 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.651139542190164e-06, |
|
"loss": 0.6691, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.635980042297688e-06, |
|
"loss": 0.6192, |
|
"step": 2322 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.620826294577208e-06, |
|
"loss": 0.7051, |
|
"step": 2324 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 7.605678335901071e-06, |
|
"loss": 0.6658, |
|
"step": 2326 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.59053620312754e-06, |
|
"loss": 0.6588, |
|
"step": 2328 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.575399933100698e-06, |
|
"loss": 0.6761, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.560269562650368e-06, |
|
"loss": 0.7106, |
|
"step": 2332 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.545145128592009e-06, |
|
"loss": 0.6396, |
|
"step": 2334 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.530026667726645e-06, |
|
"loss": 0.6142, |
|
"step": 2336 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.51491421684076e-06, |
|
"loss": 0.6932, |
|
"step": 2338 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.49980781270622e-06, |
|
"loss": 0.7122, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.484707492080172e-06, |
|
"loss": 0.6239, |
|
"step": 2342 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.469613291704962e-06, |
|
"loss": 0.6901, |
|
"step": 2344 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 7.454525248308051e-06, |
|
"loss": 0.6955, |
|
"step": 2346 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.439443398601903e-06, |
|
"loss": 0.6267, |
|
"step": 2348 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.424367779283926e-06, |
|
"loss": 0.6965, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.409298427036365e-06, |
|
"loss": 0.673, |
|
"step": 2352 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.3942353785262096e-06, |
|
"loss": 0.681, |
|
"step": 2354 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.379178670405123e-06, |
|
"loss": 0.692, |
|
"step": 2356 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.364128339309326e-06, |
|
"loss": 0.689, |
|
"step": 2358 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.349084421859533e-06, |
|
"loss": 0.7263, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.334046954660852e-06, |
|
"loss": 0.719, |
|
"step": 2362 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.31901597430269e-06, |
|
"loss": 0.6281, |
|
"step": 2364 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 7.303991517358678e-06, |
|
"loss": 0.631, |
|
"step": 2366 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.288973620386568e-06, |
|
"loss": 0.6206, |
|
"step": 2368 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.273962319928152e-06, |
|
"loss": 0.6568, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.2589576525091706e-06, |
|
"loss": 0.7039, |
|
"step": 2372 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.2439596546392295e-06, |
|
"loss": 0.6154, |
|
"step": 2374 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.228968362811702e-06, |
|
"loss": 0.7513, |
|
"step": 2376 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.21398381350364e-06, |
|
"loss": 0.6674, |
|
"step": 2378 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.199006043175698e-06, |
|
"loss": 0.6029, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.1840350882720276e-06, |
|
"loss": 0.606, |
|
"step": 2382 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.169070985220209e-06, |
|
"loss": 0.6944, |
|
"step": 2384 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 7.154113770431133e-06, |
|
"loss": 0.6939, |
|
"step": 2386 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.139163480298941e-06, |
|
"loss": 0.6111, |
|
"step": 2388 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.124220151200927e-06, |
|
"loss": 0.6834, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.10928381949744e-06, |
|
"loss": 0.7028, |
|
"step": 2392 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.094354521531808e-06, |
|
"loss": 0.6278, |
|
"step": 2394 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.079432293630244e-06, |
|
"loss": 0.6528, |
|
"step": 2396 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.064517172101754e-06, |
|
"loss": 0.6989, |
|
"step": 2398 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.0496091932380595e-06, |
|
"loss": 0.6507, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.034708393313494e-06, |
|
"loss": 0.7065, |
|
"step": 2402 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.019814808584928e-06, |
|
"loss": 0.6606, |
|
"step": 2404 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 7.004928475291679e-06, |
|
"loss": 0.6786, |
|
"step": 2406 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 6.9900494296554125e-06, |
|
"loss": 0.6859, |
|
"step": 2408 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.97517770788007e-06, |
|
"loss": 0.6581, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.960313346151761e-06, |
|
"loss": 0.6455, |
|
"step": 2412 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.9454563806387e-06, |
|
"loss": 0.7072, |
|
"step": 2414 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.930606847491094e-06, |
|
"loss": 0.6631, |
|
"step": 2416 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.915764782841073e-06, |
|
"loss": 0.6231, |
|
"step": 2418 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.900930222802589e-06, |
|
"loss": 0.6292, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.886103203471337e-06, |
|
"loss": 0.6917, |
|
"step": 2422 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.871283760924665e-06, |
|
"loss": 0.6955, |
|
"step": 2424 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.856471931221478e-06, |
|
"loss": 0.6725, |
|
"step": 2426 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 6.841667750402163e-06, |
|
"loss": 0.6991, |
|
"step": 2428 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.8268712544884965e-06, |
|
"loss": 0.6314, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.812082479483554e-06, |
|
"loss": 0.6324, |
|
"step": 2432 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.797301461371626e-06, |
|
"loss": 0.592, |
|
"step": 2434 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.782528236118124e-06, |
|
"loss": 0.634, |
|
"step": 2436 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.767762839669503e-06, |
|
"loss": 0.6593, |
|
"step": 2438 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.7530053079531664e-06, |
|
"loss": 0.6776, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.738255676877381e-06, |
|
"loss": 0.6862, |
|
"step": 2442 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.723513982331194e-06, |
|
"loss": 0.6113, |
|
"step": 2444 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.708780260184333e-06, |
|
"loss": 0.599, |
|
"step": 2446 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 6.694054546287132e-06, |
|
"loss": 0.6889, |
|
"step": 2448 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.679336876470441e-06, |
|
"loss": 0.7056, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.664627286545536e-06, |
|
"loss": 0.6923, |
|
"step": 2452 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.649925812304025e-06, |
|
"loss": 0.6434, |
|
"step": 2454 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.635232489517782e-06, |
|
"loss": 0.6766, |
|
"step": 2456 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.620547353938837e-06, |
|
"loss": 0.728, |
|
"step": 2458 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.605870441299302e-06, |
|
"loss": 0.6419, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.591201787311286e-06, |
|
"loss": 0.6459, |
|
"step": 2462 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.5765414276667895e-06, |
|
"loss": 0.6458, |
|
"step": 2464 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.561889398037643e-06, |
|
"loss": 0.6214, |
|
"step": 2466 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 6.547245734075403e-06, |
|
"loss": 0.7162, |
|
"step": 2468 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.532610471411274e-06, |
|
"loss": 0.6957, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.517983645656014e-06, |
|
"loss": 0.6705, |
|
"step": 2472 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.503365292399857e-06, |
|
"loss": 0.6689, |
|
"step": 2474 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.488755447212418e-06, |
|
"loss": 0.6398, |
|
"step": 2476 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.4741541456426115e-06, |
|
"loss": 0.651, |
|
"step": 2478 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.459561423218561e-06, |
|
"loss": 0.6459, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.444977315447521e-06, |
|
"loss": 0.6324, |
|
"step": 2482 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.4304018578157755e-06, |
|
"loss": 0.6501, |
|
"step": 2484 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.415835085788574e-06, |
|
"loss": 0.6496, |
|
"step": 2486 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 6.401277034810017e-06, |
|
"loss": 0.6259, |
|
"step": 2488 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.386727740302995e-06, |
|
"loss": 0.6057, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.37218723766909e-06, |
|
"loss": 0.638, |
|
"step": 2492 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.357655562288487e-06, |
|
"loss": 0.7214, |
|
"step": 2494 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.3431327495199025e-06, |
|
"loss": 0.6199, |
|
"step": 2496 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.328618834700475e-06, |
|
"loss": 0.5947, |
|
"step": 2498 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.314113853145704e-06, |
|
"loss": 0.7102, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.299617840149349e-06, |
|
"loss": 0.6289, |
|
"step": 2502 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.28513083098334e-06, |
|
"loss": 0.65, |
|
"step": 2504 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.270652860897705e-06, |
|
"loss": 0.6168, |
|
"step": 2506 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 6.25618396512048e-06, |
|
"loss": 0.6898, |
|
"step": 2508 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.241724178857621e-06, |
|
"loss": 0.7599, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.227273537292911e-06, |
|
"loss": 0.6371, |
|
"step": 2512 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.212832075587891e-06, |
|
"loss": 0.6176, |
|
"step": 2514 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.19839982888176e-06, |
|
"loss": 0.6578, |
|
"step": 2516 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.1839768322912966e-06, |
|
"loss": 0.6292, |
|
"step": 2518 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.169563120910775e-06, |
|
"loss": 0.6349, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.155158729811867e-06, |
|
"loss": 0.6743, |
|
"step": 2522 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.140763694043578e-06, |
|
"loss": 0.6624, |
|
"step": 2524 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.12637804863214e-06, |
|
"loss": 0.6474, |
|
"step": 2526 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6.112001828580945e-06, |
|
"loss": 0.6349, |
|
"step": 2528 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.097635068870445e-06, |
|
"loss": 0.6611, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.083277804458072e-06, |
|
"loss": 0.6373, |
|
"step": 2532 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.068930070278159e-06, |
|
"loss": 0.741, |
|
"step": 2534 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.054591901241846e-06, |
|
"loss": 0.7184, |
|
"step": 2536 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.0402633322370015e-06, |
|
"loss": 0.6419, |
|
"step": 2538 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.025944398128137e-06, |
|
"loss": 0.6488, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 6.011635133756309e-06, |
|
"loss": 0.6785, |
|
"step": 2542 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.99733557393906e-06, |
|
"loss": 0.6235, |
|
"step": 2544 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.983045753470308e-06, |
|
"loss": 0.7035, |
|
"step": 2546 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 5.96876570712028e-06, |
|
"loss": 0.6098, |
|
"step": 2548 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.954495469635418e-06, |
|
"loss": 0.6903, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.9402350757382965e-06, |
|
"loss": 0.6815, |
|
"step": 2552 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.925984560127542e-06, |
|
"loss": 0.6078, |
|
"step": 2554 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.911743957477739e-06, |
|
"loss": 0.658, |
|
"step": 2556 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.897513302439355e-06, |
|
"loss": 0.7074, |
|
"step": 2558 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.883292629638651e-06, |
|
"loss": 0.7053, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.869081973677604e-06, |
|
"loss": 0.6775, |
|
"step": 2562 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.8548813691338135e-06, |
|
"loss": 0.656, |
|
"step": 2564 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.84069085056042e-06, |
|
"loss": 0.7382, |
|
"step": 2566 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 5.826510452486027e-06, |
|
"loss": 0.7037, |
|
"step": 2568 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.81234020941461e-06, |
|
"loss": 0.7243, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.798180155825437e-06, |
|
"loss": 0.6948, |
|
"step": 2572 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.784030326172981e-06, |
|
"loss": 0.6497, |
|
"step": 2574 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.76989075488684e-06, |
|
"loss": 0.6378, |
|
"step": 2576 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.755761476371653e-06, |
|
"loss": 0.6219, |
|
"step": 2578 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.741642525007003e-06, |
|
"loss": 0.5762, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.727533935147359e-06, |
|
"loss": 0.662, |
|
"step": 2582 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.7134357411219755e-06, |
|
"loss": 0.6347, |
|
"step": 2584 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.699347977234799e-06, |
|
"loss": 0.6086, |
|
"step": 2586 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.6852706777644115e-06, |
|
"loss": 0.64, |
|
"step": 2588 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.671203876963931e-06, |
|
"loss": 0.6484, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.657147609060924e-06, |
|
"loss": 0.6643, |
|
"step": 2592 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.643101908257334e-06, |
|
"loss": 0.6417, |
|
"step": 2594 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.629066808729385e-06, |
|
"loss": 0.7175, |
|
"step": 2596 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.615042344627515e-06, |
|
"loss": 0.6875, |
|
"step": 2598 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.601028550076278e-06, |
|
"loss": 0.6308, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.587025459174271e-06, |
|
"loss": 0.6949, |
|
"step": 2602 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.573033105994038e-06, |
|
"loss": 0.6179, |
|
"step": 2604 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.559051524582002e-06, |
|
"loss": 0.6499, |
|
"step": 2606 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 5.545080748958378e-06, |
|
"loss": 0.6996, |
|
"step": 2608 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.531120813117086e-06, |
|
"loss": 0.65, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.517171751025668e-06, |
|
"loss": 0.6405, |
|
"step": 2612 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.503233596625211e-06, |
|
"loss": 0.6725, |
|
"step": 2614 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.489306383830258e-06, |
|
"loss": 0.7136, |
|
"step": 2616 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.475390146528738e-06, |
|
"loss": 0.6647, |
|
"step": 2618 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.461484918581859e-06, |
|
"loss": 0.5684, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.44759073382405e-06, |
|
"loss": 0.6602, |
|
"step": 2622 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.43370762606287e-06, |
|
"loss": 0.691, |
|
"step": 2624 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.419835629078928e-06, |
|
"loss": 0.6414, |
|
"step": 2626 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 5.405974776625785e-06, |
|
"loss": 0.6204, |
|
"step": 2628 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.392125102429899e-06, |
|
"loss": 0.7113, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.378286640190522e-06, |
|
"loss": 0.6489, |
|
"step": 2632 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.364459423579629e-06, |
|
"loss": 0.6971, |
|
"step": 2634 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.350643486241826e-06, |
|
"loss": 0.6009, |
|
"step": 2636 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.33683886179428e-06, |
|
"loss": 0.657, |
|
"step": 2638 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.3230455838266275e-06, |
|
"loss": 0.6851, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.309263685900899e-06, |
|
"loss": 0.6631, |
|
"step": 2642 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.295493201551433e-06, |
|
"loss": 0.6206, |
|
"step": 2644 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.281734164284802e-06, |
|
"loss": 0.6186, |
|
"step": 2646 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.267986607579711e-06, |
|
"loss": 0.6177, |
|
"step": 2648 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.254250564886944e-06, |
|
"loss": 0.6329, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.240526069629265e-06, |
|
"loss": 0.615, |
|
"step": 2652 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.22681315520134e-06, |
|
"loss": 0.6553, |
|
"step": 2654 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.213111854969661e-06, |
|
"loss": 0.6016, |
|
"step": 2656 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.1994222022724486e-06, |
|
"loss": 0.7026, |
|
"step": 2658 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.1857442304195895e-06, |
|
"loss": 0.7029, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.172077972692553e-06, |
|
"loss": 0.7096, |
|
"step": 2662 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.158423462344297e-06, |
|
"loss": 0.6248, |
|
"step": 2664 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.144780732599202e-06, |
|
"loss": 0.6935, |
|
"step": 2666 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.13114981665298e-06, |
|
"loss": 0.6597, |
|
"step": 2668 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 5.117530747672603e-06, |
|
"loss": 0.635, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.103923558796203e-06, |
|
"loss": 0.6966, |
|
"step": 2672 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.090328283133019e-06, |
|
"loss": 0.6497, |
|
"step": 2674 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.076744953763299e-06, |
|
"loss": 0.7003, |
|
"step": 2676 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.06317360373822e-06, |
|
"loss": 0.6528, |
|
"step": 2678 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.049614266079813e-06, |
|
"loss": 0.6376, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.036066973780882e-06, |
|
"loss": 0.5926, |
|
"step": 2682 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.022531759804919e-06, |
|
"loss": 0.6111, |
|
"step": 2684 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.009008657086025e-06, |
|
"loss": 0.6644, |
|
"step": 2686 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.99549769852884e-06, |
|
"loss": 0.608, |
|
"step": 2688 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 4.981998917008448e-06, |
|
"loss": 0.6816, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.9685123453703e-06, |
|
"loss": 0.6094, |
|
"step": 2692 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.955038016430149e-06, |
|
"loss": 0.6026, |
|
"step": 2694 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.941575962973946e-06, |
|
"loss": 0.6461, |
|
"step": 2696 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.928126217757783e-06, |
|
"loss": 0.6004, |
|
"step": 2698 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.914688813507798e-06, |
|
"loss": 0.6385, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.901263782920105e-06, |
|
"loss": 0.7059, |
|
"step": 2702 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.8878511586607055e-06, |
|
"loss": 0.6244, |
|
"step": 2704 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.874450973365419e-06, |
|
"loss": 0.6682, |
|
"step": 2706 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.861063259639793e-06, |
|
"loss": 0.6546, |
|
"step": 2708 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 4.847688050059033e-06, |
|
"loss": 0.6224, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.8343253771679155e-06, |
|
"loss": 0.6317, |
|
"step": 2712 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.8209752734807205e-06, |
|
"loss": 0.5852, |
|
"step": 2714 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.8076377714811285e-06, |
|
"loss": 0.6472, |
|
"step": 2716 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.7943129036221735e-06, |
|
"loss": 0.6486, |
|
"step": 2718 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.781000702326141e-06, |
|
"loss": 0.6729, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.767701199984497e-06, |
|
"loss": 0.6962, |
|
"step": 2722 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.754414428957806e-06, |
|
"loss": 0.6928, |
|
"step": 2724 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.74114042157566e-06, |
|
"loss": 0.625, |
|
"step": 2726 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.7278792101365864e-06, |
|
"loss": 0.6235, |
|
"step": 2728 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 4.714630826907986e-06, |
|
"loss": 0.6811, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.701395304126039e-06, |
|
"loss": 0.6498, |
|
"step": 2732 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.688172673995638e-06, |
|
"loss": 0.6704, |
|
"step": 2734 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.6749629686902984e-06, |
|
"loss": 0.6476, |
|
"step": 2736 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.661766220352098e-06, |
|
"loss": 0.6569, |
|
"step": 2738 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.64858246109157e-06, |
|
"loss": 0.637, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.63541172298766e-06, |
|
"loss": 0.5807, |
|
"step": 2742 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.622254038087622e-06, |
|
"loss": 0.6747, |
|
"step": 2744 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.60910943840695e-06, |
|
"loss": 0.5375, |
|
"step": 2746 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.595977955929298e-06, |
|
"loss": 0.6979, |
|
"step": 2748 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 4.582859622606406e-06, |
|
"loss": 0.6505, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.569754470358015e-06, |
|
"loss": 0.605, |
|
"step": 2752 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.556662531071796e-06, |
|
"loss": 0.6785, |
|
"step": 2754 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.543583836603271e-06, |
|
"loss": 0.5867, |
|
"step": 2756 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.530518418775734e-06, |
|
"loss": 0.5912, |
|
"step": 2758 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.517466309380167e-06, |
|
"loss": 0.6986, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.504427540175182e-06, |
|
"loss": 0.6569, |
|
"step": 2762 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.4914021428869225e-06, |
|
"loss": 0.7133, |
|
"step": 2764 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.4783901492089985e-06, |
|
"loss": 0.5889, |
|
"step": 2766 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.465391590802407e-06, |
|
"loss": 0.6494, |
|
"step": 2768 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 4.452406499295452e-06, |
|
"loss": 0.6486, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.439434906283674e-06, |
|
"loss": 0.728, |
|
"step": 2772 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.4264768433297565e-06, |
|
"loss": 0.6382, |
|
"step": 2774 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.413532341963477e-06, |
|
"loss": 0.6125, |
|
"step": 2776 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.4006014336816035e-06, |
|
"loss": 0.6633, |
|
"step": 2778 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.387684149947837e-06, |
|
"loss": 0.6362, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.3747805221927266e-06, |
|
"loss": 0.6853, |
|
"step": 2782 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.36189058181358e-06, |
|
"loss": 0.6875, |
|
"step": 2784 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.349014360174417e-06, |
|
"loss": 0.6433, |
|
"step": 2786 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.336151888605871e-06, |
|
"loss": 0.6001, |
|
"step": 2788 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 4.323303198405117e-06, |
|
"loss": 0.6404, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.310468320835797e-06, |
|
"loss": 0.7064, |
|
"step": 2792 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.297647287127946e-06, |
|
"loss": 0.5622, |
|
"step": 2794 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.284840128477913e-06, |
|
"loss": 0.673, |
|
"step": 2796 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.272046876048286e-06, |
|
"loss": 0.7006, |
|
"step": 2798 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.259267560967813e-06, |
|
"loss": 0.6462, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.24650221433134e-06, |
|
"loss": 0.6858, |
|
"step": 2802 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.233750867199708e-06, |
|
"loss": 0.5764, |
|
"step": 2804 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.221013550599707e-06, |
|
"loss": 0.6528, |
|
"step": 2806 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.208290295523985e-06, |
|
"loss": 0.6805, |
|
"step": 2808 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 4.195581132930975e-06, |
|
"loss": 0.629, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.1828860937448135e-06, |
|
"loss": 0.7001, |
|
"step": 2812 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.170205208855281e-06, |
|
"loss": 0.6914, |
|
"step": 2814 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.157538509117714e-06, |
|
"loss": 0.6342, |
|
"step": 2816 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.144886025352934e-06, |
|
"loss": 0.671, |
|
"step": 2818 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.13224778834717e-06, |
|
"loss": 0.6501, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.119623828851987e-06, |
|
"loss": 0.6591, |
|
"step": 2822 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.107014177584211e-06, |
|
"loss": 0.6779, |
|
"step": 2824 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.094418865225853e-06, |
|
"loss": 0.5933, |
|
"step": 2826 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.081837922424027e-06, |
|
"loss": 0.6545, |
|
"step": 2828 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 4.069271379790891e-06, |
|
"loss": 0.6536, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.056719267903564e-06, |
|
"loss": 0.653, |
|
"step": 2832 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.044181617304048e-06, |
|
"loss": 0.647, |
|
"step": 2834 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.0316584584991605e-06, |
|
"loss": 0.6862, |
|
"step": 2836 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.019149821960455e-06, |
|
"loss": 0.6298, |
|
"step": 2838 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 4.0066557381241524e-06, |
|
"loss": 0.6679, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.994176237391059e-06, |
|
"loss": 0.6181, |
|
"step": 2842 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.981711350126501e-06, |
|
"loss": 0.5991, |
|
"step": 2844 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.969261106660252e-06, |
|
"loss": 0.6876, |
|
"step": 2846 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.956825537286436e-06, |
|
"loss": 0.6379, |
|
"step": 2848 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 3.944404672263494e-06, |
|
"loss": 0.6842, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.931998541814069e-06, |
|
"loss": 0.6142, |
|
"step": 2852 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.9196071761249665e-06, |
|
"loss": 0.6461, |
|
"step": 2854 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.907230605347057e-06, |
|
"loss": 0.6672, |
|
"step": 2856 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.894868859595216e-06, |
|
"loss": 0.7138, |
|
"step": 2858 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.882521968948246e-06, |
|
"loss": 0.6803, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.870189963448801e-06, |
|
"loss": 0.6145, |
|
"step": 2862 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.857872873103322e-06, |
|
"loss": 0.6199, |
|
"step": 2864 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.845570727881951e-06, |
|
"loss": 0.6396, |
|
"step": 2866 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.833283557718471e-06, |
|
"loss": 0.6507, |
|
"step": 2868 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 3.821011392510227e-06, |
|
"loss": 0.668, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.808754262118046e-06, |
|
"loss": 0.6487, |
|
"step": 2872 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.7965121963661823e-06, |
|
"loss": 0.5782, |
|
"step": 2874 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.7842852250422293e-06, |
|
"loss": 0.6581, |
|
"step": 2876 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.772073377897052e-06, |
|
"loss": 0.6742, |
|
"step": 2878 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.7598766846447186e-06, |
|
"loss": 0.6115, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.7476951749624236e-06, |
|
"loss": 0.6028, |
|
"step": 2882 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.735528878490412e-06, |
|
"loss": 0.6697, |
|
"step": 2884 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.7233778248319175e-06, |
|
"loss": 0.655, |
|
"step": 2886 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.711242043553085e-06, |
|
"loss": 0.7478, |
|
"step": 2888 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 3.6991215641828903e-06, |
|
"loss": 0.6176, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.687016416213084e-06, |
|
"loss": 0.6596, |
|
"step": 2892 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.674926629098113e-06, |
|
"loss": 0.7006, |
|
"step": 2894 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.6628522322550396e-06, |
|
"loss": 0.6353, |
|
"step": 2896 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.650793255063485e-06, |
|
"loss": 0.6088, |
|
"step": 2898 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.6387497268655526e-06, |
|
"loss": 0.6223, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.6267216769657486e-06, |
|
"loss": 0.6713, |
|
"step": 2902 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.614709134630923e-06, |
|
"loss": 0.6149, |
|
"step": 2904 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.6027121290901888e-06, |
|
"loss": 0.6516, |
|
"step": 2906 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.590730689534857e-06, |
|
"loss": 0.6587, |
|
"step": 2908 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 3.578764845118362e-06, |
|
"loss": 0.702, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.5668146249561943e-06, |
|
"loss": 0.6854, |
|
"step": 2912 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.554880058125819e-06, |
|
"loss": 0.6114, |
|
"step": 2914 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.5429611736666237e-06, |
|
"loss": 0.6724, |
|
"step": 2916 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.53105800057983e-06, |
|
"loss": 0.7208, |
|
"step": 2918 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.519170567828435e-06, |
|
"loss": 0.6079, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.507298904337134e-06, |
|
"loss": 0.6364, |
|
"step": 2922 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.4954430389922534e-06, |
|
"loss": 0.652, |
|
"step": 2924 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.4836030006416777e-06, |
|
"loss": 0.6373, |
|
"step": 2926 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.4717788180947855e-06, |
|
"loss": 0.6763, |
|
"step": 2928 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.459970520122364e-06, |
|
"loss": 0.6164, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 3.4481781354565604e-06, |
|
"loss": 0.6251, |
|
"step": 2932 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.436401692790797e-06, |
|
"loss": 0.686, |
|
"step": 2934 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.424641220779711e-06, |
|
"loss": 0.6603, |
|
"step": 2936 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.4128967480390673e-06, |
|
"loss": 0.5985, |
|
"step": 2938 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.4011683031457134e-06, |
|
"loss": 0.6048, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.389455914637493e-06, |
|
"loss": 0.6333, |
|
"step": 2942 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.3777596110131805e-06, |
|
"loss": 0.5561, |
|
"step": 2944 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.366079420732413e-06, |
|
"loss": 0.6097, |
|
"step": 2946 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.3544153722156214e-06, |
|
"loss": 0.6303, |
|
"step": 2948 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.342767493843959e-06, |
|
"loss": 0.6627, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 3.331135813959232e-06, |
|
"loss": 0.6805, |
|
"step": 2952 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.319520360863837e-06, |
|
"loss": 0.621, |
|
"step": 2954 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.307921162820685e-06, |
|
"loss": 0.675, |
|
"step": 2956 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2963382480531292e-06, |
|
"loss": 0.6276, |
|
"step": 2958 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2847716447449095e-06, |
|
"loss": 0.6127, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2732213810400746e-06, |
|
"loss": 0.6163, |
|
"step": 2962 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2616874850429148e-06, |
|
"loss": 0.5749, |
|
"step": 2964 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.250169984817897e-06, |
|
"loss": 0.6647, |
|
"step": 2966 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2386689083895863e-06, |
|
"loss": 0.6217, |
|
"step": 2968 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2271842837425917e-06, |
|
"loss": 0.6624, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 3.2157161388214884e-06, |
|
"loss": 0.5726, |
|
"step": 2972 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.204264501530756e-06, |
|
"loss": 0.5676, |
|
"step": 2974 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.192829399734706e-06, |
|
"loss": 0.6874, |
|
"step": 2976 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.1814108612574134e-06, |
|
"loss": 0.6216, |
|
"step": 2978 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.1700089138826564e-06, |
|
"loss": 0.6749, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.158623585353833e-06, |
|
"loss": 0.6845, |
|
"step": 2982 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.147254903373913e-06, |
|
"loss": 0.6404, |
|
"step": 2984 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.1359028956053617e-06, |
|
"loss": 0.6706, |
|
"step": 2986 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.124567589670069e-06, |
|
"loss": 0.6484, |
|
"step": 2988 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.1132490131492844e-06, |
|
"loss": 0.6283, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 3.1019471935835565e-06, |
|
"loss": 0.6465, |
|
"step": 2992 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.090662158472655e-06, |
|
"loss": 0.6932, |
|
"step": 2994 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.079393935275513e-06, |
|
"loss": 0.6287, |
|
"step": 2996 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.0681425514101547e-06, |
|
"loss": 0.6626, |
|
"step": 2998 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.056908034253635e-06, |
|
"loss": 0.7158, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.045690411141957e-06, |
|
"loss": 0.5933, |
|
"step": 3002 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.0344897093700333e-06, |
|
"loss": 0.6204, |
|
"step": 3004 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.0233059561915857e-06, |
|
"loss": 0.7443, |
|
"step": 3006 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.01213917881911e-06, |
|
"loss": 0.6204, |
|
"step": 3008 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 3.0009894044237907e-06, |
|
"loss": 0.6231, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.9898566601354417e-06, |
|
"loss": 0.6244, |
|
"step": 3012 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.978740973042438e-06, |
|
"loss": 0.6864, |
|
"step": 3014 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.96764237019165e-06, |
|
"loss": 0.5827, |
|
"step": 3016 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.9565608785883817e-06, |
|
"loss": 0.5573, |
|
"step": 3018 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.9454965251962973e-06, |
|
"loss": 0.5724, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.934449336937364e-06, |
|
"loss": 0.7145, |
|
"step": 3022 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.9234193406917833e-06, |
|
"loss": 0.5912, |
|
"step": 3024 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.912406563297916e-06, |
|
"loss": 0.5811, |
|
"step": 3026 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.901411031552236e-06, |
|
"loss": 0.6165, |
|
"step": 3028 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.8904327722092495e-06, |
|
"loss": 0.6321, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.879471811981437e-06, |
|
"loss": 0.5852, |
|
"step": 3032 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.868528177539187e-06, |
|
"loss": 0.6749, |
|
"step": 3034 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.857601895510729e-06, |
|
"loss": 0.6925, |
|
"step": 3036 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.8466929924820708e-06, |
|
"loss": 0.6801, |
|
"step": 3038 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.8358014949969338e-06, |
|
"loss": 0.6904, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.8249274295566863e-06, |
|
"loss": 0.685, |
|
"step": 3042 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.8140708226202883e-06, |
|
"loss": 0.718, |
|
"step": 3044 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.803231700604204e-06, |
|
"loss": 0.6328, |
|
"step": 3046 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.79241008988237e-06, |
|
"loss": 0.6388, |
|
"step": 3048 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.7816060167861003e-06, |
|
"loss": 0.619, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.7708195076040445e-06, |
|
"loss": 0.7083, |
|
"step": 3052 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.760050588582114e-06, |
|
"loss": 0.7122, |
|
"step": 3054 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.749299285923417e-06, |
|
"loss": 0.6874, |
|
"step": 3056 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.7385656257882e-06, |
|
"loss": 0.6636, |
|
"step": 3058 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.7278496342937788e-06, |
|
"loss": 0.5918, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.717151337514482e-06, |
|
"loss": 0.6187, |
|
"step": 3062 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.7064707614815776e-06, |
|
"loss": 0.6227, |
|
"step": 3064 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.695807932183219e-06, |
|
"loss": 0.6801, |
|
"step": 3066 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.685162875564378e-06, |
|
"loss": 0.6227, |
|
"step": 3068 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.674535617526777e-06, |
|
"loss": 0.6153, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.6639261839288345e-06, |
|
"loss": 0.6088, |
|
"step": 3072 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.6533346005855986e-06, |
|
"loss": 0.719, |
|
"step": 3074 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.642760893268684e-06, |
|
"loss": 0.6458, |
|
"step": 3076 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.632205087706207e-06, |
|
"loss": 0.6118, |
|
"step": 3078 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.6216672095827267e-06, |
|
"loss": 0.6674, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.611147284539183e-06, |
|
"loss": 0.6431, |
|
"step": 3082 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.6006453381728236e-06, |
|
"loss": 0.6543, |
|
"step": 3084 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.5901613960371584e-06, |
|
"loss": 0.6629, |
|
"step": 3086 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.5796954836418886e-06, |
|
"loss": 0.6761, |
|
"step": 3088 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.569247626452842e-06, |
|
"loss": 0.6178, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.558817849891918e-06, |
|
"loss": 0.6512, |
|
"step": 3092 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.548406179337015e-06, |
|
"loss": 0.6227, |
|
"step": 3094 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.5380126401219806e-06, |
|
"loss": 0.6137, |
|
"step": 3096 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.527637257536547e-06, |
|
"loss": 0.6823, |
|
"step": 3098 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.517280056826262e-06, |
|
"loss": 0.6295, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.5069410631924383e-06, |
|
"loss": 0.6753, |
|
"step": 3102 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.496620301792082e-06, |
|
"loss": 0.6056, |
|
"step": 3104 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4863177977378393e-06, |
|
"loss": 0.6788, |
|
"step": 3106 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4760335760979313e-06, |
|
"loss": 0.5443, |
|
"step": 3108 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4657676618960944e-06, |
|
"loss": 0.7115, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.455520080111522e-06, |
|
"loss": 0.6888, |
|
"step": 3112 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.445290855678791e-06, |
|
"loss": 0.619, |
|
"step": 3114 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4350800134878207e-06, |
|
"loss": 0.5954, |
|
"step": 3116 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.424887578383799e-06, |
|
"loss": 0.6003, |
|
"step": 3118 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.414713575167129e-06, |
|
"loss": 0.6596, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4045580285933555e-06, |
|
"loss": 0.6377, |
|
"step": 3122 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.394420963373124e-06, |
|
"loss": 0.6295, |
|
"step": 3124 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3843024041721053e-06, |
|
"loss": 0.6763, |
|
"step": 3126 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3742023756109457e-06, |
|
"loss": 0.6553, |
|
"step": 3128 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3641209022651977e-06, |
|
"loss": 0.5758, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3540580086652675e-06, |
|
"loss": 0.6313, |
|
"step": 3132 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.344013719296353e-06, |
|
"loss": 0.5763, |
|
"step": 3134 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3339880585983844e-06, |
|
"loss": 0.66, |
|
"step": 3136 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.32398105096596e-06, |
|
"loss": 0.6768, |
|
"step": 3138 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.313992720748295e-06, |
|
"loss": 0.604, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.304023092249159e-06, |
|
"loss": 0.6038, |
|
"step": 3142 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.2940721897268136e-06, |
|
"loss": 0.6497, |
|
"step": 3144 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.2841400373939594e-06, |
|
"loss": 0.6327, |
|
"step": 3146 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.274226659417671e-06, |
|
"loss": 0.6151, |
|
"step": 3148 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.2643320799193404e-06, |
|
"loss": 0.5812, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.254456322974622e-06, |
|
"loss": 0.5965, |
|
"step": 3152 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.244599412613371e-06, |
|
"loss": 0.561, |
|
"step": 3154 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.234761372819577e-06, |
|
"loss": 0.6308, |
|
"step": 3156 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.2249422275313214e-06, |
|
"loss": 0.6631, |
|
"step": 3158 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.2151420006407144e-06, |
|
"loss": 0.6207, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.2053607159938195e-06, |
|
"loss": 0.5989, |
|
"step": 3162 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.1955983973906234e-06, |
|
"loss": 0.6603, |
|
"step": 3164 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.1858550685849577e-06, |
|
"loss": 0.6944, |
|
"step": 3166 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.17613075328445e-06, |
|
"loss": 0.6114, |
|
"step": 3168 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.1664254751504643e-06, |
|
"loss": 0.7135, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.1567392577980393e-06, |
|
"loss": 0.6546, |
|
"step": 3172 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.1470721247958402e-06, |
|
"loss": 0.66, |
|
"step": 3174 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.137424099666091e-06, |
|
"loss": 0.6323, |
|
"step": 3176 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.1277952058845286e-06, |
|
"loss": 0.6107, |
|
"step": 3178 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.118185466880327e-06, |
|
"loss": 0.6075, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.1085949060360654e-06, |
|
"loss": 0.5804, |
|
"step": 3182 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.0990235466876516e-06, |
|
"loss": 0.5956, |
|
"step": 3184 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.0894714121242743e-06, |
|
"loss": 0.6373, |
|
"step": 3186 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.079938525588342e-06, |
|
"loss": 0.6496, |
|
"step": 3188 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.0704249102754327e-06, |
|
"loss": 0.653, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.060930589334228e-06, |
|
"loss": 0.6464, |
|
"step": 3192 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0514555858664663e-06, |
|
"loss": 0.6891, |
|
"step": 3194 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0419999229268807e-06, |
|
"loss": 0.6106, |
|
"step": 3196 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.032563623523147e-06, |
|
"loss": 0.6475, |
|
"step": 3198 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0231467106158188e-06, |
|
"loss": 0.6835, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0137492071182862e-06, |
|
"loss": 0.6393, |
|
"step": 3202 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.0043711358967045e-06, |
|
"loss": 0.6599, |
|
"step": 3204 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.995012519769951e-06, |
|
"loss": 0.5575, |
|
"step": 3206 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.985673381509565e-06, |
|
"loss": 0.6775, |
|
"step": 3208 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.9763537438396894e-06, |
|
"loss": 0.6792, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.9670536294370203e-06, |
|
"loss": 0.6297, |
|
"step": 3212 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.9577730609307454e-06, |
|
"loss": 0.6731, |
|
"step": 3214 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9485120609024977e-06, |
|
"loss": 0.5955, |
|
"step": 3216 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9392706518862935e-06, |
|
"loss": 0.5511, |
|
"step": 3218 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9300488563684804e-06, |
|
"loss": 0.615, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9208466967876838e-06, |
|
"loss": 0.6404, |
|
"step": 3222 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9116641955347447e-06, |
|
"loss": 0.6594, |
|
"step": 3224 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.9025013749526767e-06, |
|
"loss": 0.7208, |
|
"step": 3226 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8933582573366037e-06, |
|
"loss": 0.5931, |
|
"step": 3228 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8842348649337117e-06, |
|
"loss": 0.6534, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.875131219943187e-06, |
|
"loss": 0.5661, |
|
"step": 3232 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8660473445161664e-06, |
|
"loss": 0.6018, |
|
"step": 3234 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.856983260755686e-06, |
|
"loss": 0.611, |
|
"step": 3236 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8479389907166224e-06, |
|
"loss": 0.5963, |
|
"step": 3238 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8389145564056387e-06, |
|
"loss": 0.6733, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.829909979781137e-06, |
|
"loss": 0.6799, |
|
"step": 3242 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.820925282753201e-06, |
|
"loss": 0.6559, |
|
"step": 3244 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8119604871835439e-06, |
|
"loss": 0.6275, |
|
"step": 3246 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.8030156148854493e-06, |
|
"loss": 0.6545, |
|
"step": 3248 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.7940906876237285e-06, |
|
"loss": 0.6587, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.78518572711466e-06, |
|
"loss": 0.5917, |
|
"step": 3252 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.7763007550259392e-06, |
|
"loss": 0.6909, |
|
"step": 3254 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.767435792976626e-06, |
|
"loss": 0.6103, |
|
"step": 3256 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7585908625370908e-06, |
|
"loss": 0.5358, |
|
"step": 3258 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7497659852289629e-06, |
|
"loss": 0.6556, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7409611825250771e-06, |
|
"loss": 0.6495, |
|
"step": 3262 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7321764758494252e-06, |
|
"loss": 0.6276, |
|
"step": 3264 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7234118865770988e-06, |
|
"loss": 0.6456, |
|
"step": 3266 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7146674360342374e-06, |
|
"loss": 0.6235, |
|
"step": 3268 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7059431454979825e-06, |
|
"loss": 0.6309, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.6972390361964197e-06, |
|
"loss": 0.6477, |
|
"step": 3272 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.688555129308531e-06, |
|
"loss": 0.5945, |
|
"step": 3274 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6798914459641435e-06, |
|
"loss": 0.7061, |
|
"step": 3276 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6712480072438665e-06, |
|
"loss": 0.6144, |
|
"step": 3278 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6626248341790596e-06, |
|
"loss": 0.6135, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6540219477517683e-06, |
|
"loss": 0.6335, |
|
"step": 3282 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6454393688946769e-06, |
|
"loss": 0.6202, |
|
"step": 3284 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6368771184910559e-06, |
|
"loss": 0.5869, |
|
"step": 3286 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6283352173747148e-06, |
|
"loss": 0.6373, |
|
"step": 3288 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6198136863299463e-06, |
|
"loss": 0.6223, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6113125460914759e-06, |
|
"loss": 0.6037, |
|
"step": 3292 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.6028318173444202e-06, |
|
"loss": 0.6253, |
|
"step": 3294 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.594371520724226e-06, |
|
"loss": 0.5721, |
|
"step": 3296 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5859316768166243e-06, |
|
"loss": 0.6296, |
|
"step": 3298 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5775123061575836e-06, |
|
"loss": 0.684, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5691134292332522e-06, |
|
"loss": 0.6273, |
|
"step": 3302 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5607350664799158e-06, |
|
"loss": 0.6815, |
|
"step": 3304 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.552377238283943e-06, |
|
"loss": 0.6375, |
|
"step": 3306 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5440399649817384e-06, |
|
"loss": 0.619, |
|
"step": 3308 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5357232668596932e-06, |
|
"loss": 0.6403, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.5274271641541293e-06, |
|
"loss": 0.6767, |
|
"step": 3312 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.519151677051265e-06, |
|
"loss": 0.6177, |
|
"step": 3314 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5108968256871437e-06, |
|
"loss": 0.6535, |
|
"step": 3316 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.5026626301476088e-06, |
|
"loss": 0.6536, |
|
"step": 3318 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.494449110468238e-06, |
|
"loss": 0.6237, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4862562866343033e-06, |
|
"loss": 0.6474, |
|
"step": 3322 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4780841785807166e-06, |
|
"loss": 0.6878, |
|
"step": 3324 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4699328061919848e-06, |
|
"loss": 0.5944, |
|
"step": 3326 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4618021893021605e-06, |
|
"loss": 0.5424, |
|
"step": 3328 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4536923476947938e-06, |
|
"loss": 0.6762, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.4456033011028836e-06, |
|
"loss": 0.6121, |
|
"step": 3332 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.437535069208833e-06, |
|
"loss": 0.682, |
|
"step": 3334 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4294876716443906e-06, |
|
"loss": 0.6618, |
|
"step": 3336 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4214611279906188e-06, |
|
"loss": 0.6454, |
|
"step": 3338 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4134554577778337e-06, |
|
"loss": 0.6992, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.4054706804855634e-06, |
|
"loss": 0.732, |
|
"step": 3342 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3975068155424975e-06, |
|
"loss": 0.6452, |
|
"step": 3344 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3895638823264447e-06, |
|
"loss": 0.7029, |
|
"step": 3346 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3816419001642778e-06, |
|
"loss": 0.6145, |
|
"step": 3348 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.3737408883318948e-06, |
|
"loss": 0.569, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.365860866054165e-06, |
|
"loss": 0.7124, |
|
"step": 3352 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.358001852504891e-06, |
|
"loss": 0.6782, |
|
"step": 3354 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3501638668067485e-06, |
|
"loss": 0.6796, |
|
"step": 3356 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3423469280312562e-06, |
|
"loss": 0.5775, |
|
"step": 3358 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3345510551987128e-06, |
|
"loss": 0.6854, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.326776267278167e-06, |
|
"loss": 0.6888, |
|
"step": 3362 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3190225831873582e-06, |
|
"loss": 0.618, |
|
"step": 3364 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3112900217926784e-06, |
|
"loss": 0.631, |
|
"step": 3366 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.3035786019091222e-06, |
|
"loss": 0.6385, |
|
"step": 3368 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.2958883423002423e-06, |
|
"loss": 0.6888, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.2882192616781031e-06, |
|
"loss": 0.617, |
|
"step": 3372 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.280571378703238e-06, |
|
"loss": 0.5717, |
|
"step": 3374 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2729447119846017e-06, |
|
"loss": 0.7095, |
|
"step": 3376 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.265339280079525e-06, |
|
"loss": 0.654, |
|
"step": 3378 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2577551014936651e-06, |
|
"loss": 0.6813, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2501921946809715e-06, |
|
"loss": 0.6353, |
|
"step": 3382 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2426505780436326e-06, |
|
"loss": 0.6768, |
|
"step": 3384 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2351302699320334e-06, |
|
"loss": 0.6399, |
|
"step": 3386 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2276312886447106e-06, |
|
"loss": 0.6833, |
|
"step": 3388 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2201536524283075e-06, |
|
"loss": 0.6229, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2126973794775343e-06, |
|
"loss": 0.6262, |
|
"step": 3392 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.2052624879351105e-06, |
|
"loss": 0.6546, |
|
"step": 3394 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1978489958917382e-06, |
|
"loss": 0.6821, |
|
"step": 3396 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1904569213860473e-06, |
|
"loss": 0.6165, |
|
"step": 3398 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1830862824045552e-06, |
|
"loss": 0.6649, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1757370968816217e-06, |
|
"loss": 0.6165, |
|
"step": 3402 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1684093826994026e-06, |
|
"loss": 0.5745, |
|
"step": 3404 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1611031576878118e-06, |
|
"loss": 0.5688, |
|
"step": 3406 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1538184396244777e-06, |
|
"loss": 0.6317, |
|
"step": 3408 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.146555246234694e-06, |
|
"loss": 0.6626, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.1393135951913826e-06, |
|
"loss": 0.6329, |
|
"step": 3412 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.132093504115046e-06, |
|
"loss": 0.6357, |
|
"step": 3414 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1248949905737283e-06, |
|
"loss": 0.6817, |
|
"step": 3416 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1177180720829694e-06, |
|
"loss": 0.6552, |
|
"step": 3418 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1105627661057671e-06, |
|
"loss": 0.6066, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.1034290900525279e-06, |
|
"loss": 0.6343, |
|
"step": 3422 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.096317061281027e-06, |
|
"loss": 0.6234, |
|
"step": 3424 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.0892266970963706e-06, |
|
"loss": 0.6055, |
|
"step": 3426 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.082158014750948e-06, |
|
"loss": 0.6729, |
|
"step": 3428 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.0751110314443958e-06, |
|
"loss": 0.5982, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.068085764323543e-06, |
|
"loss": 0.6599, |
|
"step": 3432 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.0610822304823887e-06, |
|
"loss": 0.5547, |
|
"step": 3434 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0541004469620453e-06, |
|
"loss": 0.6552, |
|
"step": 3436 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0471404307507016e-06, |
|
"loss": 0.6197, |
|
"step": 3438 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0402021987835831e-06, |
|
"loss": 0.5363, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0332857679429097e-06, |
|
"loss": 0.6879, |
|
"step": 3442 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0263911550578532e-06, |
|
"loss": 0.6494, |
|
"step": 3444 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0195183769045015e-06, |
|
"loss": 0.6298, |
|
"step": 3446 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.0126674502058054e-06, |
|
"loss": 0.7105, |
|
"step": 3448 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.005838391631555e-06, |
|
"loss": 0.6143, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.990312177983264e-07, |
|
"loss": 0.6185, |
|
"step": 3452 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.922459452694466e-07, |
|
"loss": 0.6458, |
|
"step": 3454 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.854825905549502e-07, |
|
"loss": 0.6494, |
|
"step": 3456 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.787411701115457e-07, |
|
"loss": 0.6772, |
|
"step": 3458 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.720217003425648e-07, |
|
"loss": 0.5951, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.65324197597931e-07, |
|
"loss": 0.6596, |
|
"step": 3462 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.586486781741212e-07, |
|
"loss": 0.5474, |
|
"step": 3464 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.519951583141129e-07, |
|
"loss": 0.5937, |
|
"step": 3466 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.453636542073629e-07, |
|
"loss": 0.6288, |
|
"step": 3468 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.38754181989755e-07, |
|
"loss": 0.6382, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.321667577435634e-07, |
|
"loss": 0.6346, |
|
"step": 3472 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.256013974974176e-07, |
|
"loss": 0.6447, |
|
"step": 3474 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.190581172262581e-07, |
|
"loss": 0.6427, |
|
"step": 3476 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.125369328513035e-07, |
|
"loss": 0.578, |
|
"step": 3478 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.060378602400055e-07, |
|
"loss": 0.6327, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.995609152060136e-07, |
|
"loss": 0.6683, |
|
"step": 3482 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.931061135091356e-07, |
|
"loss": 0.6191, |
|
"step": 3484 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.866734708553015e-07, |
|
"loss": 0.6603, |
|
"step": 3486 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.802630028965242e-07, |
|
"loss": 0.6266, |
|
"step": 3488 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.738747252308555e-07, |
|
"loss": 0.6027, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.675086534023591e-07, |
|
"loss": 0.5987, |
|
"step": 3492 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.611648029010645e-07, |
|
"loss": 0.6035, |
|
"step": 3494 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.548431891629317e-07, |
|
"loss": 0.6564, |
|
"step": 3496 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.485438275698154e-07, |
|
"loss": 0.7294, |
|
"step": 3498 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.42266733449425e-07, |
|
"loss": 0.6077, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.360119220752893e-07, |
|
"loss": 0.6056, |
|
"step": 3502 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.297794086667166e-07, |
|
"loss": 0.6647, |
|
"step": 3504 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.235692083887614e-07, |
|
"loss": 0.6177, |
|
"step": 3506 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.173813363521843e-07, |
|
"loss": 0.5869, |
|
"step": 3508 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.112158076134158e-07, |
|
"loss": 0.6142, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.050726371745221e-07, |
|
"loss": 0.5782, |
|
"step": 3512 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.989518399831642e-07, |
|
"loss": 0.6378, |
|
"step": 3514 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.928534309325675e-07, |
|
"loss": 0.7053, |
|
"step": 3516 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.867774248614801e-07, |
|
"loss": 0.6973, |
|
"step": 3518 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.807238365541392e-07, |
|
"loss": 0.6, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.746926807402344e-07, |
|
"loss": 0.6411, |
|
"step": 3522 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.686839720948735e-07, |
|
"loss": 0.6457, |
|
"step": 3524 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.626977252385437e-07, |
|
"loss": 0.657, |
|
"step": 3526 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.567339547370789e-07, |
|
"loss": 0.6444, |
|
"step": 3528 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.507926751016248e-07, |
|
"loss": 0.6061, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.448739007885985e-07, |
|
"loss": 0.5811, |
|
"step": 3532 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.389776461996578e-07, |
|
"loss": 0.6958, |
|
"step": 3534 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.331039256816664e-07, |
|
"loss": 0.6425, |
|
"step": 3536 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.27252753526656e-07, |
|
"loss": 0.6796, |
|
"step": 3538 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.214241439717961e-07, |
|
"loss": 0.6496, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.156181111993543e-07, |
|
"loss": 0.7017, |
|
"step": 3542 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.098346693366642e-07, |
|
"loss": 0.5806, |
|
"step": 3544 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.040738324560914e-07, |
|
"loss": 0.6105, |
|
"step": 3546 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.983356145749976e-07, |
|
"loss": 0.6146, |
|
"step": 3548 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.926200296557084e-07, |
|
"loss": 0.655, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.869270916054782e-07, |
|
"loss": 0.6617, |
|
"step": 3552 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.812568142764576e-07, |
|
"loss": 0.6233, |
|
"step": 3554 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.756092114656587e-07, |
|
"loss": 0.6708, |
|
"step": 3556 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.699842969149195e-07, |
|
"loss": 0.6631, |
|
"step": 3558 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.64382084310875e-07, |
|
"loss": 0.6319, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.588025872849191e-07, |
|
"loss": 0.608, |
|
"step": 3562 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.532458194131763e-07, |
|
"loss": 0.5455, |
|
"step": 3564 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.477117942164657e-07, |
|
"loss": 0.6194, |
|
"step": 3566 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.422005251602659e-07, |
|
"loss": 0.6055, |
|
"step": 3568 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.367120256546888e-07, |
|
"loss": 0.6543, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.312463090544396e-07, |
|
"loss": 0.5635, |
|
"step": 3572 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.258033886587911e-07, |
|
"loss": 0.6396, |
|
"step": 3574 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 6.203832777115449e-07, |
|
"loss": 0.6902, |
|
"step": 3576 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.149859894010035e-07, |
|
"loss": 0.6541, |
|
"step": 3578 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.096115368599364e-07, |
|
"loss": 0.6941, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 6.042599331655496e-07, |
|
"loss": 0.6815, |
|
"step": 3582 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.989311913394547e-07, |
|
"loss": 0.6164, |
|
"step": 3584 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.936253243476319e-07, |
|
"loss": 0.592, |
|
"step": 3586 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.883423451004033e-07, |
|
"loss": 0.5829, |
|
"step": 3588 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.830822664523994e-07, |
|
"loss": 0.6957, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.77845101202531e-07, |
|
"loss": 0.5859, |
|
"step": 3592 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.726308620939536e-07, |
|
"loss": 0.6012, |
|
"step": 3594 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 5.674395618140393e-07, |
|
"loss": 0.6696, |
|
"step": 3596 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.622712129943453e-07, |
|
"loss": 0.571, |
|
"step": 3598 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.571258282105829e-07, |
|
"loss": 0.6771, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.520034199825841e-07, |
|
"loss": 0.6563, |
|
"step": 3602 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.469040007742776e-07, |
|
"loss": 0.6631, |
|
"step": 3604 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.418275829936537e-07, |
|
"loss": 0.6118, |
|
"step": 3606 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.36774178992735e-07, |
|
"loss": 0.6129, |
|
"step": 3608 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.317438010675468e-07, |
|
"loss": 0.6661, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.267364614580861e-07, |
|
"loss": 0.6081, |
|
"step": 3612 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.217521723482943e-07, |
|
"loss": 0.665, |
|
"step": 3614 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 5.167909458660258e-07, |
|
"loss": 0.644, |
|
"step": 3616 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.118527940830164e-07, |
|
"loss": 0.6077, |
|
"step": 3618 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.069377290148603e-07, |
|
"loss": 0.6896, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 5.020457626209708e-07, |
|
"loss": 0.6148, |
|
"step": 3622 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.971769068045629e-07, |
|
"loss": 0.61, |
|
"step": 3624 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.923311734126135e-07, |
|
"loss": 0.642, |
|
"step": 3626 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.875085742358432e-07, |
|
"loss": 0.5783, |
|
"step": 3628 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.827091210086776e-07, |
|
"loss": 0.5715, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.779328254092253e-07, |
|
"loss": 0.602, |
|
"step": 3632 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.731796990592452e-07, |
|
"loss": 0.6423, |
|
"step": 3634 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 4.6844975352412503e-07, |
|
"loss": 0.5525, |
|
"step": 3636 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.637430003128429e-07, |
|
"loss": 0.6141, |
|
"step": 3638 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.5905945087795e-07, |
|
"loss": 0.551, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.543991166155337e-07, |
|
"loss": 0.6439, |
|
"step": 3642 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.497620088651966e-07, |
|
"loss": 0.6968, |
|
"step": 3644 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.451481389100232e-07, |
|
"loss": 0.6661, |
|
"step": 3646 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.4055751797655865e-07, |
|
"loss": 0.6425, |
|
"step": 3648 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.359901572347758e-07, |
|
"loss": 0.6854, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.3144606779805363e-07, |
|
"loss": 0.5804, |
|
"step": 3652 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.2692526072314224e-07, |
|
"loss": 0.6967, |
|
"step": 3654 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 4.2242774701014453e-07, |
|
"loss": 0.7088, |
|
"step": 3656 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.1795353760248567e-07, |
|
"loss": 0.5739, |
|
"step": 3658 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.135026433868827e-07, |
|
"loss": 0.6011, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.0907507519332477e-07, |
|
"loss": 0.6008, |
|
"step": 3662 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.046708437950464e-07, |
|
"loss": 0.612, |
|
"step": 3664 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 4.0028995990849086e-07, |
|
"loss": 0.64, |
|
"step": 3666 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9593243419329907e-07, |
|
"loss": 0.6527, |
|
"step": 3668 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.9159827725227196e-07, |
|
"loss": 0.6117, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.8728749963135136e-07, |
|
"loss": 0.6583, |
|
"step": 3672 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.8300011181959364e-07, |
|
"loss": 0.6293, |
|
"step": 3674 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 3.787361242491394e-07, |
|
"loss": 0.6873, |
|
"step": 3676 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.744955472951928e-07, |
|
"loss": 0.5988, |
|
"step": 3678 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.702783912759955e-07, |
|
"loss": 0.5805, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.660846664528006e-07, |
|
"loss": 0.6005, |
|
"step": 3682 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.619143830298477e-07, |
|
"loss": 0.6282, |
|
"step": 3684 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.577675511543388e-07, |
|
"loss": 0.6577, |
|
"step": 3686 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.5364418091641374e-07, |
|
"loss": 0.5849, |
|
"step": 3688 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.4954428234912243e-07, |
|
"loss": 0.6813, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.4546786542840604e-07, |
|
"loss": 0.6399, |
|
"step": 3692 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.414149400730682e-07, |
|
"loss": 0.5792, |
|
"step": 3694 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 3.3738551614475477e-07, |
|
"loss": 0.6974, |
|
"step": 3696 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.333796034479242e-07, |
|
"loss": 0.6802, |
|
"step": 3698 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.293972117298294e-07, |
|
"loss": 0.6719, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.254383506804926e-07, |
|
"loss": 0.6227, |
|
"step": 3702 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.21503029932676e-07, |
|
"loss": 0.6234, |
|
"step": 3704 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.1759125906186796e-07, |
|
"loss": 0.6487, |
|
"step": 3706 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.1370304758625347e-07, |
|
"loss": 0.6597, |
|
"step": 3708 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.098384049666925e-07, |
|
"loss": 0.6704, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.059973406066963e-07, |
|
"loss": 0.6164, |
|
"step": 3712 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 3.0217986385240536e-07, |
|
"loss": 0.58, |
|
"step": 3714 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.9838598399256623e-07, |
|
"loss": 0.7162, |
|
"step": 3716 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.94615710258509e-07, |
|
"loss": 0.6323, |
|
"step": 3718 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.908690518241275e-07, |
|
"loss": 0.559, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.871460178058494e-07, |
|
"loss": 0.6824, |
|
"step": 3722 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.834466172626238e-07, |
|
"loss": 0.6303, |
|
"step": 3724 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.7977085919589253e-07, |
|
"loss": 0.5992, |
|
"step": 3726 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.76118752549569e-07, |
|
"loss": 0.6421, |
|
"step": 3728 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.7249030621001925e-07, |
|
"loss": 0.6042, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.6888552900603993e-07, |
|
"loss": 0.6279, |
|
"step": 3732 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.653044297088314e-07, |
|
"loss": 0.5844, |
|
"step": 3734 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.617470170319847e-07, |
|
"loss": 0.6478, |
|
"step": 3736 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.5821329963145346e-07, |
|
"loss": 0.6808, |
|
"step": 3738 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.5470328610553764e-07, |
|
"loss": 0.6273, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.512169849948576e-07, |
|
"loss": 0.5835, |
|
"step": 3742 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.477544047823399e-07, |
|
"loss": 0.6861, |
|
"step": 3744 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.443155538931907e-07, |
|
"loss": 0.6322, |
|
"step": 3746 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.409004406948778e-07, |
|
"loss": 0.6361, |
|
"step": 3748 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.3750907349711084e-07, |
|
"loss": 0.6046, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.341414605518211e-07, |
|
"loss": 0.6514, |
|
"step": 3752 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.307976100531384e-07, |
|
"loss": 0.5473, |
|
"step": 3754 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.2747753013737438e-07, |
|
"loss": 0.6101, |
|
"step": 3756 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.2418122888300032e-07, |
|
"loss": 0.6209, |
|
"step": 3758 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.2090871431063256e-07, |
|
"loss": 0.6046, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.176599943830071e-07, |
|
"loss": 0.5897, |
|
"step": 3762 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.1443507700495968e-07, |
|
"loss": 0.6813, |
|
"step": 3764 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.112339700234156e-07, |
|
"loss": 0.5967, |
|
"step": 3766 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.0805668122735767e-07, |
|
"loss": 0.5652, |
|
"step": 3768 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.0490321834781835e-07, |
|
"loss": 0.6635, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.0177358905785538e-07, |
|
"loss": 0.5666, |
|
"step": 3772 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.986678009725329e-07, |
|
"loss": 0.5857, |
|
"step": 3774 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.9558586164890592e-07, |
|
"loss": 0.6268, |
|
"step": 3776 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.9252777858599913e-07, |
|
"loss": 0.6103, |
|
"step": 3778 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.894935592247915e-07, |
|
"loss": 0.5389, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8648321094819288e-07, |
|
"loss": 0.665, |
|
"step": 3782 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8349674108103287e-07, |
|
"loss": 0.6127, |
|
"step": 3784 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8053415689003872e-07, |
|
"loss": 0.6544, |
|
"step": 3786 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.7759546558381967e-07, |
|
"loss": 0.5639, |
|
"step": 3788 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.7468067431284708e-07, |
|
"loss": 0.6099, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.7178979016943765e-07, |
|
"loss": 0.55, |
|
"step": 3792 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.689228201877391e-07, |
|
"loss": 0.6034, |
|
"step": 3794 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.660797713437079e-07, |
|
"loss": 0.6337, |
|
"step": 3796 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.6326065055510042e-07, |
|
"loss": 0.6308, |
|
"step": 3798 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.6046546468144408e-07, |
|
"loss": 0.6309, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.5769422052403172e-07, |
|
"loss": 0.6653, |
|
"step": 3802 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.5494692482590057e-07, |
|
"loss": 0.6768, |
|
"step": 3804 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.522235842718156e-07, |
|
"loss": 0.7077, |
|
"step": 3806 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4952420548825286e-07, |
|
"loss": 0.6111, |
|
"step": 3808 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.468487950433839e-07, |
|
"loss": 0.5719, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.441973594470636e-07, |
|
"loss": 0.5759, |
|
"step": 3812 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.4156990515080682e-07, |
|
"loss": 0.6631, |
|
"step": 3814 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.3896643854777846e-07, |
|
"loss": 0.616, |
|
"step": 3816 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.3638696597277678e-07, |
|
"loss": 0.623, |
|
"step": 3818 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.338314937022145e-07, |
|
"loss": 0.6053, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.313000279541121e-07, |
|
"loss": 0.6248, |
|
"step": 3822 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.287925748880703e-07, |
|
"loss": 0.6376, |
|
"step": 3824 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.2630914060526523e-07, |
|
"loss": 0.5939, |
|
"step": 3826 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.2384973114843103e-07, |
|
"loss": 0.631, |
|
"step": 3828 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.2141435250184186e-07, |
|
"loss": 0.6548, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1900301059130092e-07, |
|
"loss": 0.6226, |
|
"step": 3832 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1661571128412597e-07, |
|
"loss": 0.6512, |
|
"step": 3834 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1425246038913151e-07, |
|
"loss": 0.639, |
|
"step": 3836 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1191326365661891e-07, |
|
"loss": 0.6667, |
|
"step": 3838 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0959812677835968e-07, |
|
"loss": 0.6857, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0730705538758324e-07, |
|
"loss": 0.6044, |
|
"step": 3842 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0504005505896141e-07, |
|
"loss": 0.6689, |
|
"step": 3844 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0279713130859515e-07, |
|
"loss": 0.7265, |
|
"step": 3846 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.0057828959400551e-07, |
|
"loss": 0.6334, |
|
"step": 3848 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.838353531411272e-08, |
|
"loss": 0.6233, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.62128738092294e-08, |
|
"loss": 0.6059, |
|
"step": 3852 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.406631036104507e-08, |
|
"loss": 0.5761, |
|
"step": 3854 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 9.194385019261287e-08, |
|
"loss": 0.7302, |
|
"step": 3856 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 8.984549846833612e-08, |
|
"loss": 0.6299, |
|
"step": 3858 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.777126029396066e-08, |
|
"loss": 0.6111, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.57211407165548e-08, |
|
"loss": 0.559, |
|
"step": 3862 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.36951447245038e-08, |
|
"loss": 0.5878, |
|
"step": 3864 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.169327724749543e-08, |
|
"loss": 0.5786, |
|
"step": 3866 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.971554315650443e-08, |
|
"loss": 0.655, |
|
"step": 3868 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.776194726378583e-08, |
|
"loss": 0.6681, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.583249432286277e-08, |
|
"loss": 0.5854, |
|
"step": 3872 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.392718902850981e-08, |
|
"loss": 0.7254, |
|
"step": 3874 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.204603601674853e-08, |
|
"loss": 0.6167, |
|
"step": 3876 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 7.018903986483083e-08, |
|
"loss": 0.5869, |
|
"step": 3878 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.835620509122898e-08, |
|
"loss": 0.6055, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.65475361556267e-08, |
|
"loss": 0.75, |
|
"step": 3882 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.47630374589081e-08, |
|
"loss": 0.6447, |
|
"step": 3884 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.300271334314434e-08, |
|
"loss": 0.6669, |
|
"step": 3886 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.12665680915836e-08, |
|
"loss": 0.6791, |
|
"step": 3888 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.955460592864337e-08, |
|
"loss": 0.6087, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.786683101989821e-08, |
|
"loss": 0.6343, |
|
"step": 3892 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.6203247472070844e-08, |
|
"loss": 0.6591, |
|
"step": 3894 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.4563859333017776e-08, |
|
"loss": 0.6753, |
|
"step": 3896 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.294867059172593e-08, |
|
"loss": 0.6953, |
|
"step": 3898 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.13576851782982e-08, |
|
"loss": 0.5878, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.979090696394795e-08, |
|
"loss": 0.6771, |
|
"step": 3902 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.824833976098453e-08, |
|
"loss": 0.6432, |
|
"step": 3904 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.672998732280776e-08, |
|
"loss": 0.6002, |
|
"step": 3906 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.52358533438968e-08, |
|
"loss": 0.5957, |
|
"step": 3908 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.3765941459804616e-08, |
|
"loss": 0.6148, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.232025524714356e-08, |
|
"loss": 0.5838, |
|
"step": 3912 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 4.0898798223582006e-08, |
|
"loss": 0.6766, |
|
"step": 3914 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.950157384783104e-08, |
|
"loss": 0.6504, |
|
"step": 3916 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 3.812858551964005e-08, |
|
"loss": 0.6663, |
|
"step": 3918 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.677983657978779e-08, |
|
"loss": 0.6125, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.5455330310071314e-08, |
|
"loss": 0.6286, |
|
"step": 3922 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.4155069933301535e-08, |
|
"loss": 0.6218, |
|
"step": 3924 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.2879058613292106e-08, |
|
"loss": 0.6801, |
|
"step": 3926 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.16272994548561e-08, |
|
"loss": 0.6241, |
|
"step": 3928 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 3.0399795503793795e-08, |
|
"loss": 0.6436, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.9196549746888237e-08, |
|
"loss": 0.6098, |
|
"step": 3932 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.801756511189524e-08, |
|
"loss": 0.6534, |
|
"step": 3934 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.6862844467540062e-08, |
|
"loss": 0.6356, |
|
"step": 3936 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.5732390623509628e-08, |
|
"loss": 0.6533, |
|
"step": 3938 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.462620633044033e-08, |
|
"loss": 0.6534, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.3544294279918002e-08, |
|
"loss": 0.6567, |
|
"step": 3942 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.2486657104471288e-08, |
|
"loss": 0.606, |
|
"step": 3944 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.1453297377557193e-08, |
|
"loss": 0.694, |
|
"step": 3946 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.044421761356552e-08, |
|
"loss": 0.6462, |
|
"step": 3948 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.9459420267804452e-08, |
|
"loss": 0.6106, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.8498907736499428e-08, |
|
"loss": 0.6251, |
|
"step": 3952 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7562682356786488e-08, |
|
"loss": 0.6783, |
|
"step": 3954 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.6650746406702277e-08, |
|
"loss": 0.6726, |
|
"step": 3956 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.576310210518517e-08, |
|
"loss": 0.5934, |
|
"step": 3958 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.489975161206636e-08, |
|
"loss": 0.636, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.4060697028063231e-08, |
|
"loss": 0.5639, |
|
"step": 3962 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3245940394778223e-08, |
|
"loss": 0.6345, |
|
"step": 3964 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.2455483694689962e-08, |
|
"loss": 0.561, |
|
"step": 3966 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.1689328851151038e-08, |
|
"loss": 0.6328, |
|
"step": 3968 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.094747772838134e-08, |
|
"loss": 0.57, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0229932131465836e-08, |
|
"loss": 0.643, |
|
"step": 3972 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 9.53669380634792e-09, |
|
"loss": 0.6187, |
|
"step": 3974 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 8.867764439826066e-09, |
|
"loss": 0.6811, |
|
"step": 3976 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 8.223145659550513e-09, |
|
"loss": 0.5876, |
|
"step": 3978 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.602839034017705e-09, |
|
"loss": 0.6392, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.0068460725680746e-09, |
|
"loss": 0.6035, |
|
"step": 3982 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.435168225381594e-09, |
|
"loss": 0.6092, |
|
"step": 3984 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.887806883474456e-09, |
|
"loss": 0.6194, |
|
"step": 3986 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.364763378694626e-09, |
|
"loss": 0.6296, |
|
"step": 3988 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.8660389837207336e-09, |
|
"loss": 0.7165, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 4.39163491205652e-09, |
|
"loss": 0.6239, |
|
"step": 3992 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.9415523180297285e-09, |
|
"loss": 0.6724, |
|
"step": 3994 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.5157922967898885e-09, |
|
"loss": 0.6761, |
|
"step": 3996 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 3.114355884301645e-09, |
|
"loss": 0.6321, |
|
"step": 3998 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.7372440573469884e-09, |
|
"loss": 0.6319, |
|
"step": 4000 |
|
} |
|
], |
|
"max_steps": 4030, |
|
"num_train_epochs": 2, |
|
"total_flos": 835628442419200.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|