{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9985838559579205,
  "global_step": 1851,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 11.5, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 2.105263157894737e-05, |
|
"loss": 6.7393, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 3.157894736842106e-05, |
|
"loss": 3.7191, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 3.999997059313686e-05, |
|
"loss": 3.1877, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 3.999894136200706e-05, |
|
"loss": 2.9055, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 3.9996441874195635e-05, |
|
"loss": 2.9141, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.999247231345674e-05, |
|
"loss": 2.7834, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 3.998703297161948e-05, |
|
"loss": 2.7901, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 3.9980124248566466e-05, |
|
"loss": 2.8676, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 3.9971746652204386e-05, |
|
"loss": 2.6787, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 3.996190079842669e-05, |
|
"loss": 2.704, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.995058741106831e-05, |
|
"loss": 2.6286, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.993780732185244e-05, |
|
"loss": 2.6983, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 3.992356147032939e-05, |
|
"loss": 2.6522, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 3.9907850903807514e-05, |
|
"loss": 2.6293, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 3.989067677727622e-05, |
|
"loss": 2.5399, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 3.987204035332105e-05, |
|
"loss": 2.6687, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.985194300203087e-05, |
|
"loss": 2.6291, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 3.983038620089714e-05, |
|
"loss": 2.6533, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 3.980737153470528e-05, |
|
"loss": 2.5923, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 3.97829006954182e-05, |
|
"loss": 2.5941, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 3.9756975482051855e-05, |
|
"loss": 2.6612, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.972959780054306e-05, |
|
"loss": 2.6058, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 3.9700769663609304e-05, |
|
"loss": 2.5226, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.967049319060081e-05, |
|
"loss": 2.5573, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.963877060734473e-05, |
|
"loss": 2.603, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9605604245981515e-05, |
|
"loss": 2.6506, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.9570996544793445e-05, |
|
"loss": 2.631, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.9534950048025396e-05, |
|
"loss": 2.5668, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.9497467405697756e-05, |
|
"loss": 2.6354, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.9458551373411664e-05, |
|
"loss": 2.5427, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.941820481214637e-05, |
|
"loss": 2.5586, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.937643068804896e-05, |
|
"loss": 2.5577, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.933323207221624e-05, |
|
"loss": 2.5664, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.9288612140468984e-05, |
|
"loss": 2.5396, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.924257417311846e-05, |
|
"loss": 2.5558, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.919512155472529e-05, |
|
"loss": 2.5306, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9146257773850585e-05, |
|
"loss": 2.4945, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.9095986422799506e-05, |
|
"loss": 2.6086, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.904431119735718e-05, |
|
"loss": 2.4973, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.899123589651695e-05, |
|
"loss": 2.5872, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.893676442220114e-05, |
|
"loss": 2.5216, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.888090077897418e-05, |
|
"loss": 2.5367, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.882364907374819e-05, |
|
"loss": 2.5495, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.8765013515481065e-05, |
|
"loss": 2.6037, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.870499841486705e-05, |
|
"loss": 2.5012, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.864360818401982e-05, |
|
"loss": 2.4773, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.8580847336148105e-05, |
|
"loss": 2.52, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.851672048522395e-05, |
|
"loss": 2.4718, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.845123234564343e-05, |
|
"loss": 2.5251, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.838438773188014e-05, |
|
"loss": 2.5547, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.831619155813119e-05, |
|
"loss": 2.475, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.8246648837955965e-05, |
|
"loss": 2.4957, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.817576468390753e-05, |
|
"loss": 2.5425, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.810354430715678e-05, |
|
"loss": 2.5495, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 3.802999301710932e-05, |
|
"loss": 2.5375, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 3.795511622101516e-05, |
|
"loss": 2.5151, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 3.787891942357115e-05, |
|
"loss": 2.4864, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 3.780140822651633e-05, |
|
"loss": 2.5396, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 3.77225883282201e-05, |
|
"loss": 2.5585, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 3.764246552326328e-05, |
|
"loss": 2.4947, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 3.756104570201213e-05, |
|
"loss": 2.5004, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 3.747833485018529e-05, |
|
"loss": 2.4729, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 3.739433904841375e-05, |
|
"loss": 2.4764, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.7309064471793794e-05, |
|
"loss": 2.5369, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 3.7222517389433085e-05, |
|
"loss": 2.4836, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 3.7134704163989705e-05, |
|
"loss": 2.4558, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 3.7045631251204434e-05, |
|
"loss": 2.5562, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 3.6955305199426164e-05, |
|
"loss": 2.4603, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.6863732649130426e-05, |
|
"loss": 2.4176, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 3.677092033243128e-05, |
|
"loss": 2.4991, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 3.667687507258631e-05, |
|
"loss": 2.4846, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 3.658160378349508e-05, |
|
"loss": 2.5998, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.648511346919079e-05, |
|
"loss": 2.5061, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.638741122332539e-05, |
|
"loss": 2.4538, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.628850422864807e-05, |
|
"loss": 2.498, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.618839975647718e-05, |
|
"loss": 2.4867, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.608710516616575e-05, |
|
"loss": 2.5426, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.598462790456035e-05, |
|
"loss": 2.4842, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.588097550545368e-05, |
|
"loss": 2.4274, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.5776155589030725e-05, |
|
"loss": 2.5354, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.5670175861308496e-05, |
|
"loss": 2.455, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.556304411356954e-05, |
|
"loss": 2.5039, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.545476822178915e-05, |
|
"loss": 2.518, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.5345356146056326e-05, |
|
"loss": 2.4718, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.52348159299886e-05, |
|
"loss": 2.4318, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.512315570014071e-05, |
|
"loss": 2.5146, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.50103836654071e-05, |
|
"loss": 2.4801, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.489650811641849e-05, |
|
"loss": 2.4429, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.478153742493235e-05, |
|
"loss": 2.4654, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4665480043217444e-05, |
|
"loss": 2.4846, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.454834450343245e-05, |
|
"loss": 2.4371, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.443013941699868e-05, |
|
"loss": 2.4071, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.431087347396702e-05, |
|
"loss": 2.3886, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.419055544237906e-05, |
|
"loss": 2.4539, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.40691941676225e-05, |
|
"loss": 2.4406, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.394679857178086e-05, |
|
"loss": 2.3687, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.382337765297756e-05, |
|
"loss": 2.4244, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.3698940484714394e-05, |
|
"loss": 2.4477, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.35734962152045e-05, |
|
"loss": 2.5432, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.34470540666998e-05, |
|
"loss": 2.4633, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.331962333481302e-05, |
|
"loss": 2.4302, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.319121338783428e-05, |
|
"loss": 2.4528, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.3061833666042416e-05, |
|
"loss": 2.3741, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.29314936810109e-05, |
|
"loss": 2.4638, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.280020301490863e-05, |
|
"loss": 2.4204, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.2667971319795473e-05, |
|
"loss": 2.436, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.253480831691264e-05, |
|
"loss": 2.4194, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.240072379596806e-05, |
|
"loss": 2.3565, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.226572761441666e-05, |
|
"loss": 2.4421, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.2129829696735636e-05, |
|
"loss": 2.4169, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.1993040033694916e-05, |
|
"loss": 2.4425, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.1855368681622584e-05, |
|
"loss": 2.399, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.171682576166565e-05, |
|
"loss": 2.3747, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.1577421459045905e-05, |
|
"loss": 2.437, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.143716602231122e-05, |
|
"loss": 2.4131, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.129606976258201e-05, |
|
"loss": 2.4329, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.115414305279327e-05, |
|
"loss": 2.4521, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.101139632693197e-05, |
|
"loss": 2.3317, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.086784007926996e-05, |
|
"loss": 2.4119, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.072348486359247e-05, |
|
"loss": 2.4315, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.0578341292422286e-05, |
|
"loss": 2.4687, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.043242003623947e-05, |
|
"loss": 2.4703, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.0285731822696954e-05, |
|
"loss": 2.5997, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.0138287435831855e-05, |
|
"loss": 2.0103, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 2.9990097715272694e-05, |
|
"loss": 1.9907, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 2.9841173555442463e-05, |
|
"loss": 1.9292, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 2.9691525904757745e-05, |
|
"loss": 1.9898, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.954116576482378e-05, |
|
"loss": 1.9234, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 2.9390104189625702e-05, |
|
"loss": 1.8726, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 2.923835228471587e-05, |
|
"loss": 1.9208, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 2.90859212063974e-05, |
|
"loss": 1.9407, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 2.8932822160904038e-05, |
|
"loss": 1.9377, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.877906640357628e-05, |
|
"loss": 1.9665, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 2.862466523803393e-05, |
|
"loss": 1.9723, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 2.846963001534507e-05, |
|
"loss": 1.9876, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 2.8313972133191615e-05, |
|
"loss": 1.9405, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 2.8157703035031353e-05, |
|
"loss": 1.9848, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.8000834209256665e-05, |
|
"loss": 1.9328, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 2.7843377188349962e-05, |
|
"loss": 1.9343, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 2.768534354803581e-05, |
|
"loss": 1.9245, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 2.752674490642996e-05, |
|
"loss": 1.9526, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 2.7367592923185207e-05, |
|
"loss": 1.93, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.720789929863421e-05, |
|
"loss": 1.9263, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 2.7047675772929328e-05, |
|
"loss": 1.9432, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 2.6886934125179504e-05, |
|
"loss": 1.9481, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 2.672568617258432e-05, |
|
"loss": 1.909, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.6563943769565258e-05, |
|
"loss": 1.9386, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.6401718806894144e-05, |
|
"loss": 1.9362, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.6239023210819027e-05, |
|
"loss": 1.9494, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.6075868942187366e-05, |
|
"loss": 1.9576, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.5912267995566746e-05, |
|
"loss": 1.937, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.5748232398363044e-05, |
|
"loss": 1.9889, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.5583774209936218e-05, |
|
"loss": 1.9285, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.5418905520713767e-05, |
|
"loss": 1.895, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.525363845130185e-05, |
|
"loss": 1.9826, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.5087985151594235e-05, |
|
"loss": 1.9869, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.4921957799879076e-05, |
|
"loss": 1.9325, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.4755568601943615e-05, |
|
"loss": 1.9479, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.4588829790176837e-05, |
|
"loss": 1.9616, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.4421753622670178e-05, |
|
"loss": 1.9706, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.425435238231638e-05, |
|
"loss": 1.9675, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.4086638375906484e-05, |
|
"loss": 1.9684, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.3918623933225043e-05, |
|
"loss": 1.9388, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.375032140614372e-05, |
|
"loss": 1.9326, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.3581743167713187e-05, |
|
"loss": 1.9521, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.3412901611253524e-05, |
|
"loss": 1.9704, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.3243809149443077e-05, |
|
"loss": 1.89, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.3074478213405937e-05, |
|
"loss": 1.9438, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.2904921251798052e-05, |
|
"loss": 1.9682, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.2735150729892013e-05, |
|
"loss": 2.008, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.2565179128660667e-05, |
|
"loss": 1.9247, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.2395018943859558e-05, |
|
"loss": 1.9377, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.222468268510828e-05, |
|
"loss": 1.9396, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.2054182874970808e-05, |
|
"loss": 1.9848, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.188353204803486e-05, |
|
"loss": 1.9382, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.1712742749990444e-05, |
|
"loss": 1.9431, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.154182753670749e-05, |
|
"loss": 1.9833, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.1370798973312813e-05, |
|
"loss": 1.9338, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.1199669633266353e-05, |
|
"loss": 1.9543, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.102845209743682e-05, |
|
"loss": 1.9455, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.085715895317679e-05, |
|
"loss": 1.9533, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.0685802793397317e-05, |
|
"loss": 2.0128, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.051439621564216e-05, |
|
"loss": 1.9471, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.0342951821161648e-05, |
|
"loss": 1.9474, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.017148221398625e-05, |
|
"loss": 1.9946, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2e-05, |
|
"loss": 1.913, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.9828517786013752e-05, |
|
"loss": 1.981, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.965704817883836e-05, |
|
"loss": 1.9809, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.948560378435784e-05, |
|
"loss": 1.9793, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1.9314197206602693e-05, |
|
"loss": 1.9207, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.914284104682322e-05, |
|
"loss": 1.8926, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 1.897154790256319e-05, |
|
"loss": 2.0005, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 1.8800330366733654e-05, |
|
"loss": 1.9432, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 1.862920102668719e-05, |
|
"loss": 1.8667, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 1.8458172463292516e-05, |
|
"loss": 1.9405, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.828725725000956e-05, |
|
"loss": 1.9617, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 1.8116467951965145e-05, |
|
"loss": 1.9447, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 1.79458171250292e-05, |
|
"loss": 1.9093, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 1.7775317314891724e-05, |
|
"loss": 1.9051, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 1.7604981056140446e-05, |
|
"loss": 1.916, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.7434820871339336e-05, |
|
"loss": 1.8569, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 1.7264849270107994e-05, |
|
"loss": 1.9163, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 1.709507874820195e-05, |
|
"loss": 1.9342, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 1.6925521786594067e-05, |
|
"loss": 1.8947, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 1.675619085055693e-05, |
|
"loss": 1.9396, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.6587098388746486e-05, |
|
"loss": 1.9416, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 1.6418256832286816e-05, |
|
"loss": 1.9382, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.6249678593856288e-05, |
|
"loss": 1.9747, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.6081376066774964e-05, |
|
"loss": 1.8799, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.591336162409352e-05, |
|
"loss": 1.8957, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 1.5745647617683627e-05, |
|
"loss": 1.8921, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.557824637732983e-05, |
|
"loss": 1.9406, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 1.5411170209823177e-05, |
|
"loss": 1.9282, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 1.5244431398056392e-05, |
|
"loss": 1.8621, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 1.5078042200120933e-05, |
|
"loss": 1.9375, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 1.4912014848405771e-05, |
|
"loss": 1.8779, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.4746361548698151e-05, |
|
"loss": 1.9353, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 1.4581094479286234e-05, |
|
"loss": 1.9255, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 1.4416225790063784e-05, |
|
"loss": 1.9163, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 1.4251767601636965e-05, |
|
"loss": 1.9314, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 1.4087732004433258e-05, |
|
"loss": 1.8751, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.3924131057812642e-05, |
|
"loss": 1.8934, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.376097678918098e-05, |
|
"loss": 1.9148, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.3598281193105858e-05, |
|
"loss": 1.8754, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.3436056230434747e-05, |
|
"loss": 1.9183, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.3274313827415678e-05, |
|
"loss": 1.9236, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.3113065874820506e-05, |
|
"loss": 1.889, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.295232422707068e-05, |
|
"loss": 1.8898, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.2792100701365794e-05, |
|
"loss": 1.8991, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.2632407076814794e-05, |
|
"loss": 1.9559, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.2473255093570039e-05, |
|
"loss": 1.9048, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.2314656451964196e-05, |
|
"loss": 1.859, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.2156622811650043e-05, |
|
"loss": 1.8825, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.1999165790743338e-05, |
|
"loss": 1.9094, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.1842296964968652e-05, |
|
"loss": 1.937, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.1686027866808394e-05, |
|
"loss": 1.8838, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.1530369984654936e-05, |
|
"loss": 1.9023, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.1375334761966074e-05, |
|
"loss": 1.9099, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.122093359642372e-05, |
|
"loss": 1.9058, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.1067177839095957e-05, |
|
"loss": 1.9359, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.0914078793602601e-05, |
|
"loss": 1.8897, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.0761647715284139e-05, |
|
"loss": 1.9341, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0609895810374304e-05, |
|
"loss": 1.876, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.0458834235176225e-05, |
|
"loss": 1.8287, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.0308474095242267e-05, |
|
"loss": 1.8523, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.0128983382202781e-05, |
|
"loss": 2.0887, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 9.980205236069665e-06, |
|
"loss": 1.4855, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 9.832163712437392e-06, |
|
"loss": 1.4915, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.684869694834003e-06, |
|
"loss": 1.4679, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 9.538334011833363e-06, |
|
"loss": 1.4298, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 9.392567436259034e-06, |
|
"loss": 1.4018, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 9.247580684392345e-06, |
|
"loss": 1.4642, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 9.10338441518453e-06, |
|
"loss": 1.4434, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 8.959989229473125e-06, |
|
"loss": 1.4574, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 8.817405669202619e-06, |
|
"loss": 1.4256, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 8.675644216649478e-06, |
|
"loss": 1.4539, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 8.534715293651492e-06, |
|
"loss": 1.5016, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 8.39462926084159e-06, |
|
"loss": 1.4738, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 8.255396416886194e-06, |
|
"loss": 1.4265, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 8.117026997728079e-06, |
|
"loss": 1.4235, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 7.979531175833828e-06, |
|
"loss": 1.5084, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 7.842919059446046e-06, |
|
"loss": 1.4426, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 7.707200691840173e-06, |
|
"loss": 1.4797, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 7.572386050586196e-06, |
|
"loss": 1.4309, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 7.438485046815078e-06, |
|
"loss": 1.4505, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 7.305507524490145e-06, |
|
"loss": 1.4734, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 7.1734632596834106e-06, |
|
"loss": 1.397, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 7.042361959856825e-06, |
|
"loss": 1.4341, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 6.912213263148673e-06, |
|
"loss": 1.4599, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 6.783026737664942e-06, |
|
"loss": 1.4466, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 6.654811880775973e-06, |
|
"loss": 1.4435, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 6.527578118418187e-06, |
|
"loss": 1.4597, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 6.401334804401171e-06, |
|
"loss": 1.4217, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 6.276091219719984e-06, |
|
"loss": 1.4477, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 6.151856571872854e-06, |
|
"loss": 1.4716, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 6.028639994184277e-06, |
|
"loss": 1.4398, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 5.906450545133564e-06, |
|
"loss": 1.4442, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 5.785297207688905e-06, |
|
"loss": 1.4506, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 5.665188888646935e-06, |
|
"loss": 1.4123, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 5.546134417977984e-06, |
|
"loss": 1.456, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 5.428142548176876e-06, |
|
"loss": 1.4274, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 5.311221953619514e-06, |
|
"loss": 1.4062, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 5.195381229925156e-06, |
|
"loss": 1.427, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 5.080628893324475e-06, |
|
"loss": 1.4783, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 4.9669733800334955e-06, |
|
"loss": 1.4356, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.854423045633392e-06, |
|
"loss": 1.4809, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 4.742986164456196e-06, |
|
"loss": 1.4079, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 4.632670928976501e-06, |
|
"loss": 1.4884, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 4.523485449209195e-06, |
|
"loss": 1.4499, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 4.415437752113223e-06, |
|
"loss": 1.4065, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.308535781001457e-06, |
|
"loss": 1.4888, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 4.202787394956769e-06, |
|
"loss": 1.4707, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.0982003682542146e-06, |
|
"loss": 1.4426, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 3.994782389789535e-06, |
|
"loss": 1.3991, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 3.892541062513853e-06, |
|
"loss": 1.4187, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.7914839028747507e-06, |
|
"loss": 1.4248, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 3.691618340263701e-06, |
|
"loss": 1.447, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 3.5929517164698436e-06, |
|
"loss": 1.4394, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 3.495491285140282e-06, |
|
"loss": 1.4359, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 3.399244211246779e-06, |
|
"loss": 1.4752, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.304217570559052e-06, |
|
"loss": 1.4508, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.2104183491245466e-06, |
|
"loss": 1.4718, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.117853442754879e-06, |
|
"loss": 1.4514, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.026529656518864e-06, |
|
"loss": 1.399, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 2.936453704242215e-06, |
|
"loss": 1.4136, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 2.8476322080139862e-06, |
|
"loss": 1.4474, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.760071697699729e-06, |
|
"loss": 1.4542, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 2.673778610461448e-06, |
|
"loss": 1.4176, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 2.588759290284337e-06, |
|
"loss": 1.4471, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 2.505019987510426e-06, |
|
"loss": 1.4217, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.4225668583790474e-06, |
|
"loss": 1.4194, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.3414059645742504e-06, |
|
"loss": 1.3959, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.261543272779192e-06, |
|
"loss": 1.4689, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.1829846542374565e-06, |
|
"loss": 1.4568, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.105735884321436e-06, |
|
"loss": 1.451, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.029802642107734e-06, |
|
"loss": 1.4418, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.9551905099596813e-06, |
|
"loss": 1.4619, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.8819049731169059e-06, |
|
"loss": 1.4182, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.809951419292104e-06, |
|
"loss": 1.4095, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 1.7393351382749424e-06, |
|
"loss": 1.4397, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.6700613215431549e-06, |
|
"loss": 1.4747, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.6021350618809184e-06, |
|
"loss": 1.4356, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.5355613530044089e-06, |
|
"loss": 1.4381, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.470345089194709e-06, |
|
"loss": 1.4444, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.4064910649379803e-06, |
|
"loss": 1.469, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.3440039745729894e-06, |
|
"loss": 1.4427, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.2828884119460105e-06, |
|
"loss": 1.3941, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.2231488700730742e-06, |
|
"loss": 1.4452, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.1647897408096886e-06, |
|
"loss": 1.4236, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.107815314527929e-06, |
|
"loss": 1.4538, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0522297798010594e-06, |
|
"loss": 1.4112, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 9.980372230955693e-07, |
|
"loss": 1.4808, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 9.452416284707743e-07, |
|
"loss": 1.4509, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 8.938468772859132e-07, |
|
"loss": 1.4414, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 8.438567479147975e-07, |
|
"loss": 1.4203, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.952749154680405e-07, |
|
"loss": 1.4294, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 7.481049515228811e-07, |
|
"loss": 1.4136, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.023503238606122e-07, |
|
"loss": 1.4316, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 6.580143962116281e-07, |
|
"loss": 1.4645, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 6.151004280081574e-07, |
|
"loss": 1.4692, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.736115741446146e-07, |
|
"loss": 1.4408, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 5.335508847456794e-07, |
|
"loss": 1.4552, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.949213049420576e-07, |
|
"loss": 1.4657, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.577256746539638e-07, |
|
"loss": 1.4189, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.2196672838233257e-07, |
|
"loss": 1.4573, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.876470950078037e-07, |
|
"loss": 1.4382, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.5476929759743927e-07, |
|
"loss": 1.4272, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.233357532192494e-07, |
|
"loss": 1.4866, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.933487727644813e-07, |
|
"loss": 1.4132, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.648105607777507e-07, |
|
"loss": 1.4498, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.3772321529494712e-07, |
|
"loss": 1.4505, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.1208872768901713e-07, |
|
"loss": 1.4338, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.8790898252354583e-07, |
|
"loss": 1.4299, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.6518575741421904e-07, |
|
"loss": 1.4378, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.4392072289814319e-07, |
|
"loss": 1.4323, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.241154423110169e-07, |
|
"loss": 1.4144, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.0577137167221863e-07, |
|
"loss": 1.4343, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 8.88898595777543e-08, |
|
"loss": 1.4625, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 7.347214710111239e-08, |
|
"loss": 1.3614, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 5.951936770202782e-08, |
|
"loss": 1.4099, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 4.7032547143155417e-08, |
|
"loss": 1.4601, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 3.60126034146524e-08, |
|
"loss": 1.4231, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 2.6460346666696835e-08, |
|
"loss": 1.4549, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.8376479149926353e-08, |
|
"loss": 1.4122, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.176159516380837e-08, |
|
"loss": 1.3961, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.616181012955025e-09, |
|
"loss": 1.4652, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.9406149713628874e-09, |
|
"loss": 1.425, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.351672546129785e-10, |
|
"loss": 1.4916, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.3883, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1851, |
|
"total_flos": 1.8340911973547377e+18, |
|
"train_loss": 2.0081952639620475, |
|
"train_runtime": 43810.4873, |
|
"train_samples_per_second": 5.415, |
|
"train_steps_per_second": 0.042 |
|
} |
|
], |
  "max_steps": 1851,
  "num_train_epochs": 3,
  "total_flos": 1.8340911973547377e+18,
  "trial_name": null,
  "trial_params": null
}