|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9986232216613127, |
|
"eval_steps": 500, |
|
"global_step": 3267, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 6.116207951070337e-08, |
|
"loss": 1.1319, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.0581039755351683e-07, |
|
"loss": 1.1109, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.116207951070337e-07, |
|
"loss": 1.0925, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.174311926605506e-07, |
|
"loss": 1.0283, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2232415902140673e-06, |
|
"loss": 1.0316, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5290519877675841e-06, |
|
"loss": 1.0114, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8348623853211011e-06, |
|
"loss": 0.9868, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2.140672782874618e-06, |
|
"loss": 0.9866, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.4464831804281347e-06, |
|
"loss": 0.9904, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 2.7522935779816517e-06, |
|
"loss": 0.9907, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.0581039755351682e-06, |
|
"loss": 0.9807, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.3639143730886852e-06, |
|
"loss": 0.9628, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.6697247706422022e-06, |
|
"loss": 0.9902, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.975535168195719e-06, |
|
"loss": 0.9675, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.281345565749236e-06, |
|
"loss": 0.9575, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.587155963302753e-06, |
|
"loss": 0.9595, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.892966360856269e-06, |
|
"loss": 0.9518, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.198776758409786e-06, |
|
"loss": 0.9744, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 5.504587155963303e-06, |
|
"loss": 0.9569, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 5.81039755351682e-06, |
|
"loss": 0.9728, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 6.1162079510703365e-06, |
|
"loss": 0.9719, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.422018348623854e-06, |
|
"loss": 0.9435, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 6.7278287461773705e-06, |
|
"loss": 0.9598, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.033639143730887e-06, |
|
"loss": 0.968, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.3394495412844045e-06, |
|
"loss": 0.9591, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 7.645259938837921e-06, |
|
"loss": 0.975, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 7.951070336391438e-06, |
|
"loss": 0.9792, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 8.256880733944956e-06, |
|
"loss": 0.9831, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.562691131498472e-06, |
|
"loss": 0.9819, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 8.868501529051989e-06, |
|
"loss": 0.9605, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.174311926605506e-06, |
|
"loss": 0.9591, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.480122324159022e-06, |
|
"loss": 0.9806, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 9.785932721712539e-06, |
|
"loss": 0.9699, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.0091743119266055e-05, |
|
"loss": 0.9733, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.0397553516819572e-05, |
|
"loss": 0.9585, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.070336391437309e-05, |
|
"loss": 0.9809, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.1009174311926607e-05, |
|
"loss": 0.9723, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.1314984709480123e-05, |
|
"loss": 0.9623, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.162079510703364e-05, |
|
"loss": 0.9751, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.1926605504587156e-05, |
|
"loss": 0.9739, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.2232415902140673e-05, |
|
"loss": 0.9952, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.2538226299694191e-05, |
|
"loss": 0.959, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.2844036697247708e-05, |
|
"loss": 0.9811, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.3149847094801224e-05, |
|
"loss": 0.9649, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.3455657492354741e-05, |
|
"loss": 0.995, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.3761467889908258e-05, |
|
"loss": 0.9922, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.4067278287461774e-05, |
|
"loss": 0.9769, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.437308868501529e-05, |
|
"loss": 0.9736, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.4678899082568809e-05, |
|
"loss": 0.9979, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.4984709480122325e-05, |
|
"loss": 1.0041, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.5290519877675842e-05, |
|
"loss": 0.9841, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.559633027522936e-05, |
|
"loss": 0.9775, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.5902140672782875e-05, |
|
"loss": 0.9917, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.6207951070336393e-05, |
|
"loss": 1.0037, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.6513761467889912e-05, |
|
"loss": 0.9789, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.6819571865443427e-05, |
|
"loss": 0.991, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7125382262996945e-05, |
|
"loss": 0.9945, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.743119266055046e-05, |
|
"loss": 1.0008, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7737003058103978e-05, |
|
"loss": 1.0067, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.8042813455657493e-05, |
|
"loss": 0.9863, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.834862385321101e-05, |
|
"loss": 0.9825, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.865443425076453e-05, |
|
"loss": 0.9839, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.8960244648318044e-05, |
|
"loss": 0.9841, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9266055045871563e-05, |
|
"loss": 0.9876, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.9571865443425077e-05, |
|
"loss": 0.9975, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.9877675840978596e-05, |
|
"loss": 0.9879, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.999994861726391e-05, |
|
"loss": 1.0118, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9999634613566673e-05, |
|
"loss": 0.9976, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.9999035161089365e-05, |
|
"loss": 1.0047, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.99981502769439e-05, |
|
"loss": 1.0151, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.999697998639012e-05, |
|
"loss": 1.0113, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.9995524322835035e-05, |
|
"loss": 3.6465, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.999378332783191e-05, |
|
"loss": 10.618, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.999175705107905e-05, |
|
"loss": 7.8776, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9989445550418393e-05, |
|
"loss": 7.4236, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.9986848891833846e-05, |
|
"loss": 7.2276, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9983967149449426e-05, |
|
"loss": 7.1782, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.9980800405527127e-05, |
|
"loss": 7.1329, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.997734875046456e-05, |
|
"loss": 7.0406, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.9973612282792413e-05, |
|
"loss": 6.9716, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9969591109171584e-05, |
|
"loss": 6.8977, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.9965285344390185e-05, |
|
"loss": 6.9986, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.9960695111360235e-05, |
|
"loss": 6.9526, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.995582054111416e-05, |
|
"loss": 6.8724, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9950661772801062e-05, |
|
"loss": 6.8315, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9945218953682736e-05, |
|
"loss": 6.8329, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.9939492239129462e-05, |
|
"loss": 6.7751, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.9933481792615583e-05, |
|
"loss": 6.7403, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.992718778571483e-05, |
|
"loss": 6.7105, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9920610398095427e-05, |
|
"loss": 6.6564, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.9913749817514963e-05, |
|
"loss": 6.6235, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.990660623981503e-05, |
|
"loss": 6.5955, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.989917986891563e-05, |
|
"loss": 6.5393, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9891470916809362e-05, |
|
"loss": 6.5081, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9883479603555366e-05, |
|
"loss": 6.6039, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9875206157273038e-05, |
|
"loss": 6.5238, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.986665081413552e-05, |
|
"loss": 6.4699, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9857813818362963e-05, |
|
"loss": 6.4097, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9848695422215548e-05, |
|
"loss": 6.3424, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.98392958859863e-05, |
|
"loss": 6.3099, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9829615477993626e-05, |
|
"loss": 6.2784, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.98196544745737e-05, |
|
"loss": 6.2284, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.980941316007253e-05, |
|
"loss": 6.1791, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9798891826837872e-05, |
|
"loss": 6.1843, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.978809077521088e-05, |
|
"loss": 6.1663, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9777010313517517e-05, |
|
"loss": 6.1206, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9765650758059766e-05, |
|
"loss": 6.1238, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.97540124331066e-05, |
|
"loss": 6.0433, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.974209567088473e-05, |
|
"loss": 6.0664, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9729900811569103e-05, |
|
"loss": 6.0446, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.971742820327321e-05, |
|
"loss": 5.9776, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9704678202039148e-05, |
|
"loss": 5.9745, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9691651171827443e-05, |
|
"loss": 5.9384, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.9678347484506667e-05, |
|
"loss": 5.9117, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.966476751984283e-05, |
|
"loss": 6.119, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9650911665488533e-05, |
|
"loss": 6.0265, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.9636780316971902e-05, |
|
"loss": 5.9482, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.962237387768529e-05, |
|
"loss": 5.8934, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.960769275887378e-05, |
|
"loss": 5.8545, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9592737379623427e-05, |
|
"loss": 5.8376, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.9577508166849308e-05, |
|
"loss": 5.7886, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9562005555283328e-05, |
|
"loss": 5.7906, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.954622998746181e-05, |
|
"loss": 5.8274, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9530181913712875e-05, |
|
"loss": 5.7876, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9513861792143562e-05, |
|
"loss": 5.7297, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9497270088626773e-05, |
|
"loss": 5.6929, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9480407276787968e-05, |
|
"loss": 5.6823, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9463273837991643e-05, |
|
"loss": 5.6282, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.9445870261327592e-05, |
|
"loss": 5.6081, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.942819704359693e-05, |
|
"loss": 5.5536, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.9410254689297944e-05, |
|
"loss": 5.5778, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.939204371061166e-05, |
|
"loss": 5.5404, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9373564627387243e-05, |
|
"loss": 5.5092, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.935481796712713e-05, |
|
"loss": 5.5029, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.9335804264972018e-05, |
|
"loss": 5.4471, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9316524063685544e-05, |
|
"loss": 5.4672, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.9296977913638806e-05, |
|
"loss": 5.4028, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.9277166372794663e-05, |
|
"loss": 5.3503, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.92570900066918e-05, |
|
"loss": 5.3415, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.923674938842857e-05, |
|
"loss": 5.3396, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.9216145098646667e-05, |
|
"loss": 5.295, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.919527772551451e-05, |
|
"loss": 5.2313, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.9174147864710486e-05, |
|
"loss": 5.202, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9152756119405937e-05, |
|
"loss": 5.2071, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.9131103100247934e-05, |
|
"loss": 5.176, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9109189425341853e-05, |
|
"loss": 5.1591, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.908701572023372e-05, |
|
"loss": 5.0989, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.9064582617892383e-05, |
|
"loss": 5.0671, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9041890758691403e-05, |
|
"loss": 5.0621, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.9018940790390798e-05, |
|
"loss": 5.0536, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8995733368118556e-05, |
|
"loss": 5.0143, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8972269154351917e-05, |
|
"loss": 4.9735, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8948548818898478e-05, |
|
"loss": 4.9158, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.8924573038877062e-05, |
|
"loss": 4.9633, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8900342498698384e-05, |
|
"loss": 4.9076, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.8875857890045544e-05, |
|
"loss": 4.8885, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8851119911854236e-05, |
|
"loss": 4.8311, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.8826129270292837e-05, |
|
"loss": 4.8127, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8800886678742225e-05, |
|
"loss": 4.8224, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.877539285777543e-05, |
|
"loss": 4.7722, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.8749648535137048e-05, |
|
"loss": 4.7507, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8723654445722485e-05, |
|
"loss": 4.7067, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.8697411331556958e-05, |
|
"loss": 4.6938, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.867091994177433e-05, |
|
"loss": 4.7096, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.8644181032595737e-05, |
|
"loss": 4.6845, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.861719536730795e-05, |
|
"loss": 4.6439, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.8589963716241653e-05, |
|
"loss": 4.665, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.8562486856749403e-05, |
|
"loss": 4.624, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.853476557318346e-05, |
|
"loss": 4.5849, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8506800656873397e-05, |
|
"loss": 4.5517, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8478592906103508e-05, |
|
"loss": 4.5289, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.8450143126090015e-05, |
|
"loss": 4.5497, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.842145212895809e-05, |
|
"loss": 4.5517, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.8392520733718684e-05, |
|
"loss": 4.5593, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.8363349766245108e-05, |
|
"loss": 4.5105, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.83339400592495e-05, |
|
"loss": 4.5006, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8304292452259037e-05, |
|
"loss": 4.4665, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8274407791591966e-05, |
|
"loss": 4.4582, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.824428693033345e-05, |
|
"loss": 4.3962, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.821393072831121e-05, |
|
"loss": 4.4116, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8183340052071e-05, |
|
"loss": 4.4362, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.8152515774851846e-05, |
|
"loss": 4.3861, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.812145877656113e-05, |
|
"loss": 4.3563, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.8090169943749477e-05, |
|
"loss": 4.3578, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.805865016958543e-05, |
|
"loss": 4.3163, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.802690035382998e-05, |
|
"loss": 4.3062, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.7994921402810862e-05, |
|
"loss": 4.2984, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.796271422939668e-05, |
|
"loss": 4.3264, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.7930279752970866e-05, |
|
"loss": 4.2954, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.7897618899405423e-05, |
|
"loss": 4.2867, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.78647326010345e-05, |
|
"loss": 4.307, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7831621796627773e-05, |
|
"loss": 4.2882, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.7798287431363644e-05, |
|
"loss": 4.2169, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.7764730456802278e-05, |
|
"loss": 4.2223, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.7730951830858422e-05, |
|
"loss": 4.2085, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.769695251777406e-05, |
|
"loss": 4.1943, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.76627334880909e-05, |
|
"loss": 4.1485, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.7628295718622666e-05, |
|
"loss": 4.1761, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.75936401924272e-05, |
|
"loss": 4.2029, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.755876789877842e-05, |
|
"loss": 4.1792, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.7523679833138062e-05, |
|
"loss": 4.1495, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.748837699712728e-05, |
|
"loss": 4.1342, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7452860398498052e-05, |
|
"loss": 4.1056, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.7417131051104382e-05, |
|
"loss": 4.0872, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.738118997487341e-05, |
|
"loss": 4.0758, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7345038195776255e-05, |
|
"loss": 4.0705, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7308676745798748e-05, |
|
"loss": 4.0337, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.7272106662911972e-05, |
|
"loss": 4.0706, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.723532899104263e-05, |
|
"loss": 4.051, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.719834478004323e-05, |
|
"loss": 4.0815, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7161155085662144e-05, |
|
"loss": 4.0477, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.712376096951345e-05, |
|
"loss": 4.04, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.7086163499046633e-05, |
|
"loss": 4.0102, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.704836374751612e-05, |
|
"loss": 4.0224, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7010362793950627e-05, |
|
"loss": 4.0093, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.697216172312238e-05, |
|
"loss": 4.0132, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.693376162551613e-05, |
|
"loss": 3.9887, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.689516359729803e-05, |
|
"loss": 3.9689, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_loss": 3.9833078384399414, |
|
"eval_runtime": 298.6999, |
|
"eval_samples_per_second": 51.661, |
|
"eval_steps_per_second": 0.81, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.6856368740284342e-05, |
|
"loss": 3.994, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.6817378161909995e-05, |
|
"loss": 3.9327, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.677819297519696e-05, |
|
"loss": 3.9248, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.6738814298722484e-05, |
|
"loss": 3.9413, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.6699243256587156e-05, |
|
"loss": 3.9817, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.6659480978382815e-05, |
|
"loss": 3.9372, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.6619528599160318e-05, |
|
"loss": 3.9028, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.657938725939713e-05, |
|
"loss": 3.8865, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.6539058104964764e-05, |
|
"loss": 3.9158, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.6498542287096074e-05, |
|
"loss": 3.8638, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6457840962352403e-05, |
|
"loss": 3.8689, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.6416955292590556e-05, |
|
"loss": 3.834, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.6375886444929628e-05, |
|
"loss": 3.8792, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.63346355917177e-05, |
|
"loss": 3.8686, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.6293203910498375e-05, |
|
"loss": 3.8564, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.6251592583977155e-05, |
|
"loss": 3.8589, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.6209802799987674e-05, |
|
"loss": 3.8278, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.6167835751457812e-05, |
|
"loss": 3.8564, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.6125692636375618e-05, |
|
"loss": 3.8202, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.6083374657755132e-05, |
|
"loss": 3.7818, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.604088302360203e-05, |
|
"loss": 3.8209, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.599821894687914e-05, |
|
"loss": 3.7893, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.5955383645471828e-05, |
|
"loss": 3.8644, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.5912378342153233e-05, |
|
"loss": 3.8387, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.586920426454934e-05, |
|
"loss": 3.8262, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.5825862645103962e-05, |
|
"loss": 3.7695, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.5782354721043546e-05, |
|
"loss": 3.7794, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.5738681734341855e-05, |
|
"loss": 3.7333, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.569484493168452e-05, |
|
"loss": 3.7563, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.565084556443345e-05, |
|
"loss": 3.7361, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.5606684888591105e-05, |
|
"loss": 3.747, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.556236416476465e-05, |
|
"loss": 3.75, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.551788465812997e-05, |
|
"loss": 3.7464, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.5473247638395547e-05, |
|
"loss": 3.7593, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.5428454379766223e-05, |
|
"loss": 3.7157, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.5383506160906826e-05, |
|
"loss": 3.7071, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.5338404264905654e-05, |
|
"loss": 3.6841, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.5293149979237875e-05, |
|
"loss": 3.6952, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.5247744595728753e-05, |
|
"loss": 3.6776, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.520218941051678e-05, |
|
"loss": 3.6699, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.5156485724016672e-05, |
|
"loss": 3.7052, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.5110634840882258e-05, |
|
"loss": 3.6975, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.5064638069969228e-05, |
|
"loss": 3.6662, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.5018496724297778e-05, |
|
"loss": 3.6939, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.497221212101511e-05, |
|
"loss": 3.6552, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.4925785581357852e-05, |
|
"loss": 3.6815, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.4879218430614346e-05, |
|
"loss": 3.6587, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.4832511998086802e-05, |
|
"loss": 3.6675, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.4785667617053347e-05, |
|
"loss": 3.6285, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4738686624729987e-05, |
|
"loss": 3.6395, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4691570362232422e-05, |
|
"loss": 3.6219, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.4644320174537754e-05, |
|
"loss": 3.5879, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.4596937410446117e-05, |
|
"loss": 3.6236, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.4549423422542148e-05, |
|
"loss": 3.6519, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.4501779567156394e-05, |
|
"loss": 3.6416, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.4454007204326592e-05, |
|
"loss": 3.6103, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.4406107697758838e-05, |
|
"loss": 3.5947, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.4358082414788666e-05, |
|
"loss": 3.6187, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4309932726342007e-05, |
|
"loss": 3.5991, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.4261660006896066e-05, |
|
"loss": 3.5571, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.421326563444008e-05, |
|
"loss": 3.6026, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4164750990435991e-05, |
|
"loss": 3.5359, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.4116117459778991e-05, |
|
"loss": 3.5824, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4067366430758004e-05, |
|
"loss": 3.5873, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.4018499295016057e-05, |
|
"loss": 3.5691, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3969517447510546e-05, |
|
"loss": 3.5361, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.3920422286473426e-05, |
|
"loss": 3.5292, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3871215213371284e-05, |
|
"loss": 3.5255, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.3821897632865345e-05, |
|
"loss": 3.5514, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3772470952771364e-05, |
|
"loss": 3.5652, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.3722936584019453e-05, |
|
"loss": 3.5358, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.3673295940613789e-05, |
|
"loss": 3.4849, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.362355043959226e-05, |
|
"loss": 3.5337, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3573701500986012e-05, |
|
"loss": 3.5249, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3523750547778903e-05, |
|
"loss": 3.5507, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.3473699005866904e-05, |
|
"loss": 3.5388, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.342354830401738e-05, |
|
"loss": 3.5469, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.3373299873828303e-05, |
|
"loss": 3.508, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.3322955149687391e-05, |
|
"loss": 3.5033, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.327251556873117e-05, |
|
"loss": 3.4788, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.322198257080393e-05, |
|
"loss": 3.4982, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.3171357598416642e-05, |
|
"loss": 3.5111, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3120642096705773e-05, |
|
"loss": 3.463, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.3069837513392024e-05, |
|
"loss": 3.4749, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.3018945298739022e-05, |
|
"loss": 3.4663, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.2967966905511906e-05, |
|
"loss": 3.5109, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2916903788935869e-05, |
|
"loss": 3.4501, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2865757406654598e-05, |
|
"loss": 3.4358, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.2814529218688688e-05, |
|
"loss": 3.4623, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.2763220687393942e-05, |
|
"loss": 3.4665, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.2711833277419648e-05, |
|
"loss": 3.4724, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2660368455666752e-05, |
|
"loss": 3.461, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.2608827691245993e-05, |
|
"loss": 3.4771, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.2557212455435958e-05, |
|
"loss": 3.4635, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.2505524221641097e-05, |
|
"loss": 3.4335, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.245376446534965e-05, |
|
"loss": 3.472, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.2401934664091529e-05, |
|
"loss": 3.4799, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2350036297396153e-05, |
|
"loss": 3.4485, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2298070846750197e-05, |
|
"loss": 3.4313, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.2246039795555314e-05, |
|
"loss": 3.4332, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.2193944629085778e-05, |
|
"loss": 3.4413, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.2141786834446105e-05, |
|
"loss": 3.421, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.2089567900528577e-05, |
|
"loss": 3.4325, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.2037289317970757e-05, |
|
"loss": 3.4061, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.1984952579112938e-05, |
|
"loss": 3.4377, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.1932559177955533e-05, |
|
"loss": 3.4344, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1880110610116438e-05, |
|
"loss": 3.3839, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.1827608372788325e-05, |
|
"loss": 3.3993, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.1775053964695913e-05, |
|
"loss": 3.389, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.172244888605319e-05, |
|
"loss": 3.4025, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.1669794638520578e-05, |
|
"loss": 3.4452, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.1617092725162064e-05, |
|
"loss": 3.4183, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.156434465040231e-05, |
|
"loss": 3.4039, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.151155191998369e-05, |
|
"loss": 3.3809, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.1458716040923319e-05, |
|
"loss": 3.3923, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.140583852147003e-05, |
|
"loss": 3.4024, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.1352920871061318e-05, |
|
"loss": 3.376, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1299964600280247e-05, |
|
"loss": 3.3738, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.1246971220812348e-05, |
|
"loss": 3.4101, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.1193942245402443e-05, |
|
"loss": 3.3748, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.1140879187811482e-05, |
|
"loss": 3.3673, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.108778356277331e-05, |
|
"loss": 3.3682, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.103465688595145e-05, |
|
"loss": 3.3493, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.0981500673895825e-05, |
|
"loss": 3.3544, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.0928316443999462e-05, |
|
"loss": 3.3209, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.0875105714455193e-05, |
|
"loss": 3.344, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.0821870004212305e-05, |
|
"loss": 3.3518, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.0768610832933169e-05, |
|
"loss": 3.3127, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0715329720949887e-05, |
|
"loss": 3.3628, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.0662028189220876e-05, |
|
"loss": 3.3672, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.0608707759287454e-05, |
|
"loss": 3.3463, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.0555369953230402e-05, |
|
"loss": 3.3627, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.0502016293626513e-05, |
|
"loss": 3.3459, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.044864830350515e-05, |
|
"loss": 3.3362, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.0395267506304742e-05, |
|
"loss": 3.3494, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.034187542582931e-05, |
|
"loss": 3.3633, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.028847358620497e-05, |
|
"loss": 3.3545, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0235063511836416e-05, |
|
"loss": 3.3147, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.0181646727363413e-05, |
|
"loss": 3.3145, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.0128224757617272e-05, |
|
"loss": 3.33, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.007479912757733e-05, |
|
"loss": 3.2897, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.0021371362327397e-05, |
|
"loss": 3.3371, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 9.967942987012243e-06, |
|
"loss": 3.2989, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.91451552679405e-06, |
|
"loss": 3.2985, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 9.861090506808876e-06, |
|
"loss": 3.3103, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.80766945212313e-06, |
|
"loss": 3.301, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.754253887690014e-06, |
|
"loss": 3.3136, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.700845338306018e-06, |
|
"loss": 3.3193, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.647445328567368e-06, |
|
"loss": 3.3424, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 9.594055382826534e-06, |
|
"loss": 3.3016, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.540677025148686e-06, |
|
"loss": 3.2602, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 9.48731177926821e-06, |
|
"loss": 3.2621, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.433961168545194e-06, |
|
"loss": 3.2633, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 9.380626715921972e-06, |
|
"loss": 3.2901, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.327309943879604e-06, |
|
"loss": 3.2936, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.274012374394466e-06, |
|
"loss": 3.2913, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.220735528894755e-06, |
|
"loss": 3.263, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 9.167480928217108e-06, |
|
"loss": 3.2958, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.114250092563142e-06, |
|
"loss": 3.2725, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.061044541456097e-06, |
|
"loss": 3.2369, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 9.007865793697425e-06, |
|
"loss": 3.2583, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.954715367323468e-06, |
|
"loss": 3.2728, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.90159477956209e-06, |
|
"loss": 3.2607, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.848505546789407e-06, |
|
"loss": 3.2824, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 8.795449184486457e-06, |
|
"loss": 3.2628, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.742427207195975e-06, |
|
"loss": 3.2473, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.689441128479134e-06, |
|
"loss": 3.275, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.636492460872347e-06, |
|
"loss": 3.2592, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 8.583582715844113e-06, |
|
"loss": 3.2729, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.530713403751822e-06, |
|
"loss": 3.254, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.47788603379869e-06, |
|
"loss": 3.2348, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.425102113990647e-06, |
|
"loss": 3.2028, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.372363151093302e-06, |
|
"loss": 3.2566, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 8.319670650588916e-06, |
|
"loss": 3.2351, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.26702611663346e-06, |
|
"loss": 3.2721, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 8.214431052013636e-06, |
|
"loss": 3.2191, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.161886958104007e-06, |
|
"loss": 3.2367, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.109395334824127e-06, |
|
"loss": 3.2721, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.056957680595733e-06, |
|
"loss": 3.2054, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 8.00457549229996e-06, |
|
"loss": 3.215, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.952250265234618e-06, |
|
"loss": 3.2181, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.899983493071506e-06, |
|
"loss": 3.2148, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.847776667813782e-06, |
|
"loss": 3.2244, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.795631279753346e-06, |
|
"loss": 3.1918, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 7.74354881742834e-06, |
|
"loss": 3.2312, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.691530767580613e-06, |
|
"loss": 3.2341, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 7.639578615113312e-06, |
|
"loss": 3.2151, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.587693843048475e-06, |
|
"loss": 3.1932, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.535877932484714e-06, |
|
"loss": 3.1933, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.484132362554915e-06, |
|
"loss": 3.2302, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 7.432458610384037e-06, |
|
"loss": 3.1807, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.380858151046922e-06, |
|
"loss": 3.2067, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.329332457526215e-06, |
|
"loss": 3.1698, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.27788300067029e-06, |
|
"loss": 3.2017, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 7.2265112491512824e-06, |
|
"loss": 3.213, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.175218669423153e-06, |
|
"loss": 3.1922, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.124006725679828e-06, |
|
"loss": 3.2127, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 7.0728768798134195e-06, |
|
"loss": 3.2197, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.0218305913724615e-06, |
|
"loss": 3.2192, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.97086931752028e-06, |
|
"loss": 3.2106, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.919994512993369e-06, |
|
"loss": 3.219, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.869207630059885e-06, |
|
"loss": 3.184, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.818510118478172e-06, |
|
"loss": 3.1925, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.767903425455402e-06, |
|
"loss": 3.1785, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.7173889956062285e-06, |
|
"loss": 3.1938, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 6.666968270911585e-06, |
|
"loss": 3.1738, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.616642690677488e-06, |
|
"loss": 3.1936, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 6.566413691493989e-06, |
|
"loss": 3.159, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.5162827071941194e-06, |
|
"loss": 3.1676, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.46625116881301e-06, |
|
"loss": 3.1511, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 6.4163205045469975e-06, |
|
"loss": 3.1833, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.366492139712886e-06, |
|
"loss": 3.1522, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 6.316767496707237e-06, |
|
"loss": 3.2052, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.267147994965792e-06, |
|
"loss": 3.1991, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 6.217635050922923e-06, |
|
"loss": 3.1547, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.168230077971224e-06, |
|
"loss": 3.152, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 6.118934486421149e-06, |
|
"loss": 3.1728, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.069749683460765e-06, |
|
"loss": 3.1562, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_loss": 3.173952341079712, |
|
"eval_runtime": 298.8936, |
|
"eval_samples_per_second": 51.627, |
|
"eval_steps_per_second": 0.81, |
|
"step": 2179 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 6.020677073115571e-06, |
|
"loss": 3.1367, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.971718056208431e-06, |
|
"loss": 3.1139, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.922874030319567e-06, |
|
"loss": 3.1473, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.874146389746697e-06, |
|
"loss": 3.1541, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.825536525465192e-06, |
|
"loss": 3.1406, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.7770458250884044e-06, |
|
"loss": 3.119, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.728675672828037e-06, |
|
"loss": 3.1426, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.680427449454631e-06, |
|
"loss": 3.1518, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.6323025322581694e-06, |
|
"loss": 3.1133, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 5.5843022950087325e-06, |
|
"loss": 3.1132, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.5364281079173075e-06, |
|
"loss": 3.1484, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 5.488681337596653e-06, |
|
"loss": 3.1142, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.441063347022313e-06, |
|
"loss": 3.1046, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 5.393575495493679e-06, |
|
"loss": 3.0933, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.346219138595215e-06, |
|
"loss": 3.1068, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.298995628157738e-06, |
|
"loss": 3.1296, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 5.251906312219859e-06, |
|
"loss": 3.1175, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.2049525349894625e-06, |
|
"loss": 3.066, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 5.158135636805372e-06, |
|
"loss": 3.0771, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.111456954099064e-06, |
|
"loss": 3.0995, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 5.064917819356532e-06, |
|
"loss": 3.07, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 5.0185195610802365e-06, |
|
"loss": 3.0931, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.9722635037512006e-06, |
|
"loss": 3.0755, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.92615096779118e-06, |
|
"loss": 3.0904, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.8801832695249864e-06, |
|
"loss": 3.0652, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.834361721142901e-06, |
|
"loss": 3.066, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.788687630663232e-06, |
|
"loss": 3.1098, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.743162301894952e-06, |
|
"loss": 3.106, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.697787034400511e-06, |
|
"loss": 3.1032, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.652563123458703e-06, |
|
"loss": 3.051, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.607491860027726e-06, |
|
"loss": 3.1206, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.5625745307083e-06, |
|
"loss": 3.0868, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.517812417706967e-06, |
|
"loss": 3.1152, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 4.473206798799469e-06, |
|
"loss": 3.0738, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.428758947294278e-06, |
|
"loss": 3.0936, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 4.3844701319962525e-06, |
|
"loss": 3.0962, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.3403416171704225e-06, |
|
"loss": 3.1008, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 4.296374662505885e-06, |
|
"loss": 3.0757, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.252570523079852e-06, |
|
"loss": 3.1204, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.2089304493218355e-06, |
|
"loss": 3.0742, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 4.165455686977927e-06, |
|
"loss": 3.0537, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.12214747707527e-06, |
|
"loss": 3.0725, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 4.079007055886598e-06, |
|
"loss": 3.0505, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 4.036035654894967e-06, |
|
"loss": 3.075, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.993234500758597e-06, |
|
"loss": 3.0824, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.9506048152758584e-06, |
|
"loss": 3.1028, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.90814781535038e-06, |
|
"loss": 3.0949, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.865864712956336e-06, |
|
"loss": 3.1145, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.823756715103822e-06, |
|
"loss": 3.0736, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.7818250238044274e-06, |
|
"loss": 3.0743, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.740070836036893e-06, |
|
"loss": 3.0886, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.6984953437129734e-06, |
|
"loss": 3.0525, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6570997336433854e-06, |
|
"loss": 3.0829, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.6158851875039458e-06, |
|
"loss": 3.0823, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.57485288180183e-06, |
|
"loss": 3.0612, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.534003987842005e-06, |
|
"loss": 3.0736, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.493339671693765e-06, |
|
"loss": 3.092, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.4528610941574724e-06, |
|
"loss": 3.0988, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.412569410731401e-06, |
|
"loss": 3.0658, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 3.372465771578771e-06, |
|
"loss": 3.0688, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.33255132149489e-06, |
|
"loss": 3.1096, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 3.2928271998745074e-06, |
|
"loss": 3.0544, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2532945406792573e-06, |
|
"loss": 3.0951, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.2139544724053083e-06, |
|
"loss": 3.0645, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 3.1748081180511393e-06, |
|
"loss": 3.0489, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.135856595085498e-06, |
|
"loss": 3.0727, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 3.097101015415478e-06, |
|
"loss": 3.0441, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.0585424853547953e-06, |
|
"loss": 3.0358, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 3.02018210559221e-06, |
|
"loss": 3.0223, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9820209711600858e-06, |
|
"loss": 3.0548, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.9440601714031614e-06, |
|
"loss": 3.0687, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.9063007899474214e-06, |
|
"loss": 3.0546, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.8687439046691956e-06, |
|
"loss": 3.043, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.83139058766436e-06, |
|
"loss": 3.0779, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.794241905217753e-06, |
|
"loss": 3.0609, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.757298917772727e-06, |
|
"loss": 3.0404, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.720562679900892e-06, |
|
"loss": 3.0568, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.6840342402719867e-06, |
|
"loss": 3.033, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6477146416239695e-06, |
|
"loss": 3.0252, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.6116049207332304e-06, |
|
"loss": 3.0511, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5757061083850153e-06, |
|
"loss": 3.0329, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.5400192293439864e-06, |
|
"loss": 3.038, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.5045453023249724e-06, |
|
"loss": 3.0511, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.469285339963892e-06, |
|
"loss": 3.0528, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.43424034878885e-06, |
|
"loss": 3.0811, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.399411329191393e-06, |
|
"loss": 3.079, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.3647992753979698e-06, |
|
"loss": 3.0413, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.330405175441529e-06, |
|
"loss": 3.0101, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.296230011133337e-06, |
|
"loss": 3.0165, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2622747580349313e-06, |
|
"loss": 3.0654, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 2.2285403854302912e-06, |
|
"loss": 3.0348, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1950278562981497e-06, |
|
"loss": 3.0505, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 2.1617381272845174e-06, |
|
"loss": 3.0139, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.128672148675366e-06, |
|
"loss": 3.0148, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 2.095830864369518e-06, |
|
"loss": 3.0709, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.063215211851678e-06, |
|
"loss": 3.0484, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 2.0308261221656845e-06, |
|
"loss": 3.051, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9986645198879385e-06, |
|
"loss": 3.0223, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.9667313231009955e-06, |
|
"loss": 3.012, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9350274433673745e-06, |
|
"loss": 3.0439, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.9035537857035157e-06, |
|
"loss": 3.0163, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.8723112485539741e-06, |
|
"loss": 3.0249, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.8413007237657422e-06, |
|
"loss": 3.0288, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.810523096562814e-06, |
|
"loss": 3.0737, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7799792455209019e-06, |
|
"loss": 3.0578, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.7496700425423708e-06, |
|
"loss": 3.0571, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.719596352831332e-06, |
|
"loss": 3.0531, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.6897590348689607e-06, |
|
"loss": 3.0621, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.660158940388975e-06, |
|
"loss": 3.0343, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.6307969143533397e-06, |
|
"loss": 2.9669, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.601673794928127e-06, |
|
"loss": 2.9813, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.5727904134596084e-06, |
|
"loss": 3.0041, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.544147594450508e-06, |
|
"loss": 3.0225, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.5157461555364772e-06, |
|
"loss": 3.0281, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.4875869074627436e-06, |
|
"loss": 3.0106, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4596706540609862e-06, |
|
"loss": 3.041, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.4319981922263636e-06, |
|
"loss": 3.0251, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.404570311894793e-06, |
|
"loss": 3.0577, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.377387796020374e-06, |
|
"loss": 3.04, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.350451420553065e-06, |
|
"loss": 3.0483, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.3237619544165081e-06, |
|
"loss": 3.0236, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.2973201594860985e-06, |
|
"loss": 3.0387, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.271126790567223e-06, |
|
"loss": 3.0403, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.2451825953737273e-06, |
|
"loss": 3.0073, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.219488314506556e-06, |
|
"loss": 3.0374, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.19404468143262e-06, |
|
"loss": 3.0193, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1688524224638652e-06, |
|
"loss": 3.0517, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.1439122567365214e-06, |
|
"loss": 3.0039, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.119224896190595e-06, |
|
"loss": 3.0071, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.0947910455495248e-06, |
|
"loss": 2.98, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.070611402300089e-06, |
|
"loss": 3.0076, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.0466866566724698e-06, |
|
"loss": 3.018, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0230174916205681e-06, |
|
"loss": 3.0337, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 9.99604582802498e-07, |
|
"loss": 3.018, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.764485985613092e-07, |
|
"loss": 3.0559, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 9.535501999058971e-07, |
|
"loss": 3.0159, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.309100404921445e-07, |
|
"loss": 3.0024, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 9.085287666042508e-07, |
|
"loss": 3.0056, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.86407017136296e-07, |
|
"loss": 2.9986, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.645454235739903e-07, |
|
"loss": 3.0271, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.429446099766614e-07, |
|
"loss": 2.9838, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.216051929594271e-07, |
|
"loss": 3.0144, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 8.005277816755985e-07, |
|
"loss": 3.0081, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.797129777992951e-07, |
|
"loss": 3.0238, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.591613755082661e-07, |
|
"loss": 3.0089, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.38873561466924e-07, |
|
"loss": 3.0092, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.188501148096117e-07, |
|
"loss": 2.9975, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.990916071240506e-07, |
|
"loss": 3.0156, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.795986024350465e-07, |
|
"loss": 3.0238, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.60371657188369e-07, |
|
"loss": 3.0223, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.414113202348793e-07, |
|
"loss": 2.9978, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.227181328148568e-07, |
|
"loss": 2.9838, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.042926285425577e-07, |
|
"loss": 2.9852, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.861353333909692e-07, |
|
"loss": 3.0411, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.682467656768054e-07, |
|
"loss": 3.024, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.506274360457087e-07, |
|
"loss": 3.0024, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.332778474576694e-07, |
|
"loss": 3.0062, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 5.161984951726762e-07, |
|
"loss": 3.0125, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.993898667365671e-07, |
|
"loss": 3.007, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.828524419671266e-07, |
|
"loss": 3.0326, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.6658669294037393e-07, |
|
"loss": 3.01, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.505930839770967e-07, |
|
"loss": 2.9916, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.348720716295918e-07, |
|
"loss": 3.0423, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.194241046686398e-07, |
|
"loss": 2.9791, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.0424962407068167e-07, |
|
"loss": 3.0081, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.8934906300524654e-07, |
|
"loss": 3.0078, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.7472284682256943e-07, |
|
"loss": 3.0088, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.603713930414676e-07, |
|
"loss": 2.988, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.462951113374036e-07, |
|
"loss": 3.0218, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.324944035308053e-07, |
|
"loss": 3.0111, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.189696635755868e-07, |
|
"loss": 3.0035, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.05721277547909e-07, |
|
"loss": 3.0153, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.927496236351501e-07, |
|
"loss": 3.015, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.8005507212512164e-07, |
|
"loss": 3.0029, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.676379853954858e-07, |
|
"loss": 2.9836, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.554987179034218e-07, |
|
"loss": 2.9989, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.436376161755005e-07, |
|
"loss": 3.0051, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.3205501879779546e-07, |
|
"loss": 3.0094, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.2075125640621531e-07, |
|
"loss": 3.0073, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.097266516770713e-07, |
|
"loss": 2.9887, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.9898151931785682e-07, |
|
"loss": 3.0277, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.885161660582746e-07, |
|
"loss": 3.025, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.7833089064146825e-07, |
|
"loss": 3.0513, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.6842598381551e-07, |
|
"loss": 3.0129, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.5880172832508644e-07, |
|
"loss": 3.005, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.494583989034326e-07, |
|
"loss": 2.9974, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.40396262264495e-07, |
|
"loss": 3.0213, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.3161557709530982e-07, |
|
"loss": 3.0189, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.231165940486234e-07, |
|
"loss": 3.0322, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.1489955573573241e-07, |
|
"loss": 2.9922, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0696469671956588e-07, |
|
"loss": 3.0492, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.931224350798185e-08, |
|
"loss": 2.9918, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 9.19424145473058e-08, |
|
"loss": 3.0176, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 8.485542021609117e-08, |
|
"loss": 3.045, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.805146281912202e-08, |
|
"loss": 3.0608, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 7.153073658162646e-08, |
|
"loss": 3.0038, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.529342764374225e-08, |
|
"loss": 3.0085, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.933971405519656e-08, |
|
"loss": 3.0356, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.3669765770228974e-08, |
|
"loss": 2.9979, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.828374464273422e-08, |
|
"loss": 2.9973, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.318180442164588e-08, |
|
"loss": 3.0554, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.8364090746547676e-08, |
|
"loss": 2.9875, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.383074114351237e-08, |
|
"loss": 3.02, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9581885021181534e-08, |
|
"loss": 3.0043, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.5617643667065207e-08, |
|
"loss": 2.9746, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1938130244083535e-08, |
|
"loss": 3.025, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.854344978733824e-08, |
|
"loss": 2.9946, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.5433699201108377e-08, |
|
"loss": 2.9883, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.2608967256088067e-08, |
|
"loss": 2.9877, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.0069334586854106e-08, |
|
"loss": 3.0237, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.8148736895578e-09, |
|
"loss": 3.0206, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.845648919863278e-09, |
|
"loss": 2.9899, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.161716491105639e-09, |
|
"loss": 2.9984, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.763124472685563e-09, |
|
"loss": 3.0315, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.6499127887026346e-09, |
|
"loss": 3.0021, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 8.221132168073631e-10, |
|
"loss": 3.0235, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.797493873019086e-10, |
|
"loss": 3.0035, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 2.283678246284282e-11, |
|
"loss": 2.99, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_loss": 3.044856548309326, |
|
"eval_runtime": 298.6451, |
|
"eval_samples_per_second": 51.67, |
|
"eval_steps_per_second": 0.81, |
|
"step": 3267 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 3267, |
|
"total_flos": 3.65384602859539e+19, |
|
"train_loss": 3.471017555902958, |
|
"train_runtime": 35969.3936, |
|
"train_samples_per_second": 11.629, |
|
"train_steps_per_second": 0.091 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 3267, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 1000, |
|
"total_flos": 3.65384602859539e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|