|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.9873039581777445, |
|
"eval_steps": 500, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.98755290017426e-05, |
|
"loss": 1.7892, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.975105800348519e-05, |
|
"loss": 1.7869, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.962658700522779e-05, |
|
"loss": 1.7872, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9502116006970375e-05, |
|
"loss": 1.7552, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.937764500871298e-05, |
|
"loss": 1.6772, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9253174010455565e-05, |
|
"loss": 1.7078, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.912870301219816e-05, |
|
"loss": 1.6951, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9004232013940755e-05, |
|
"loss": 1.7137, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.887976101568335e-05, |
|
"loss": 1.7008, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.8755290017425944e-05, |
|
"loss": 1.725, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.863081901916854e-05, |
|
"loss": 1.7187, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.850634802091113e-05, |
|
"loss": 1.7209, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.838187702265373e-05, |
|
"loss": 1.7354, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.825740602439632e-05, |
|
"loss": 1.7021, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.813293502613891e-05, |
|
"loss": 1.6657, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.800846402788151e-05, |
|
"loss": 1.6852, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.78839930296241e-05, |
|
"loss": 1.6746, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7759522031366697e-05, |
|
"loss": 1.6886, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.763505103310929e-05, |
|
"loss": 1.6571, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.751058003485188e-05, |
|
"loss": 1.7131, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.738610903659448e-05, |
|
"loss": 1.7099, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.726163803833707e-05, |
|
"loss": 1.7284, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.7137167040079664e-05, |
|
"loss": 1.704, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.701269604182226e-05, |
|
"loss": 1.6921, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.6888225043564854e-05, |
|
"loss": 1.6828, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.676375404530744e-05, |
|
"loss": 1.7255, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.6639283047050044e-05, |
|
"loss": 1.6767, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.651481204879263e-05, |
|
"loss": 1.6796, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.6390341050535227e-05, |
|
"loss": 1.7305, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.626587005227782e-05, |
|
"loss": 1.7471, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.6141399054020416e-05, |
|
"loss": 1.6348, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.601692805576301e-05, |
|
"loss": 1.676, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5892457057505606e-05, |
|
"loss": 1.6602, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.5767986059248194e-05, |
|
"loss": 1.6634, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.5643515060990796e-05, |
|
"loss": 1.6952, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.5519044062733384e-05, |
|
"loss": 1.683, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.539457306447598e-05, |
|
"loss": 1.6572, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.5270102066218574e-05, |
|
"loss": 1.6472, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.514563106796117e-05, |
|
"loss": 1.6852, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.502116006970376e-05, |
|
"loss": 1.6737, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.489668907144636e-05, |
|
"loss": 1.6936, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 4.4772218073188946e-05, |
|
"loss": 1.6713, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 4.464774707493155e-05, |
|
"loss": 1.6681, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 4.4523276076674136e-05, |
|
"loss": 1.6231, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.439880507841673e-05, |
|
"loss": 1.6593, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 4.4274334080159326e-05, |
|
"loss": 1.6844, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 4.414986308190192e-05, |
|
"loss": 1.6751, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 4.4025392083644516e-05, |
|
"loss": 1.6748, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.390092108538711e-05, |
|
"loss": 1.6793, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 4.37764500871297e-05, |
|
"loss": 1.6959, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 4.36519790888723e-05, |
|
"loss": 1.6893, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 4.352750809061489e-05, |
|
"loss": 1.6733, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.340303709235748e-05, |
|
"loss": 1.6968, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 4.327856609410008e-05, |
|
"loss": 1.6567, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.315409509584267e-05, |
|
"loss": 1.6775, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 4.302962409758526e-05, |
|
"loss": 1.7103, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.290515309932786e-05, |
|
"loss": 1.6945, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 4.278068210107045e-05, |
|
"loss": 1.6926, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 4.2656211102813046e-05, |
|
"loss": 1.6629, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.253174010455564e-05, |
|
"loss": 1.7125, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.2407269106298235e-05, |
|
"loss": 1.6778, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 4.228279810804083e-05, |
|
"loss": 1.6428, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 4.2158327109783425e-05, |
|
"loss": 1.6473, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 4.203385611152601e-05, |
|
"loss": 1.6821, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.1909385113268615e-05, |
|
"loss": 1.6827, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 4.17849141150112e-05, |
|
"loss": 1.6818, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.16604431167538e-05, |
|
"loss": 1.7324, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 4.153597211849639e-05, |
|
"loss": 1.7007, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.141150112023899e-05, |
|
"loss": 1.6554, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 4.128703012198158e-05, |
|
"loss": 1.6719, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 4.116255912372418e-05, |
|
"loss": 1.641, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 4.1038088125466765e-05, |
|
"loss": 1.7093, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.091361712720937e-05, |
|
"loss": 1.726, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 4.0789146128951955e-05, |
|
"loss": 1.6654, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 4.066467513069455e-05, |
|
"loss": 1.7246, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 4.0540204132437145e-05, |
|
"loss": 1.6712, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.041573313417974e-05, |
|
"loss": 1.664, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 4.029126213592233e-05, |
|
"loss": 1.6806, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 4.016679113766493e-05, |
|
"loss": 1.6865, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 4.004232013940752e-05, |
|
"loss": 1.6939, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 3.991784914115011e-05, |
|
"loss": 1.6803, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 3.979337814289271e-05, |
|
"loss": 1.6861, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 3.96689071446353e-05, |
|
"loss": 1.6644, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.95444361463779e-05, |
|
"loss": 1.6767, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 3.941996514812049e-05, |
|
"loss": 1.6824, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 3.929549414986308e-05, |
|
"loss": 1.6679, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 3.917102315160568e-05, |
|
"loss": 1.6844, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.904655215334827e-05, |
|
"loss": 1.6082, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 3.8922081155090865e-05, |
|
"loss": 1.6625, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 3.879761015683346e-05, |
|
"loss": 1.6618, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 3.8673139158576054e-05, |
|
"loss": 1.702, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.854866816031865e-05, |
|
"loss": 1.7301, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 3.8424197162061244e-05, |
|
"loss": 1.691, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 3.829972616380383e-05, |
|
"loss": 1.7045, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.8175255165546434e-05, |
|
"loss": 1.6659, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.805078416728902e-05, |
|
"loss": 1.6984, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.792631316903162e-05, |
|
"loss": 1.6618, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.780184217077421e-05, |
|
"loss": 1.7073, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.7677371172516806e-05, |
|
"loss": 1.6488, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7552900174259395e-05, |
|
"loss": 1.6705, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.7428429176001996e-05, |
|
"loss": 1.7043, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 3.7303958177744584e-05, |
|
"loss": 1.6837, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 3.717948717948718e-05, |
|
"loss": 1.6678, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.7055016181229774e-05, |
|
"loss": 1.6359, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.693054518297237e-05, |
|
"loss": 1.6795, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 3.6806074184714964e-05, |
|
"loss": 1.6368, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.668160318645756e-05, |
|
"loss": 1.6751, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.655713218820015e-05, |
|
"loss": 1.7043, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 3.643266118994275e-05, |
|
"loss": 1.7065, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6308190191685337e-05, |
|
"loss": 1.6563, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.618371919342793e-05, |
|
"loss": 1.6445, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.6059248195170526e-05, |
|
"loss": 1.6907, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.593477719691312e-05, |
|
"loss": 1.6746, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.5810306198655716e-05, |
|
"loss": 1.6559, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 3.568583520039831e-05, |
|
"loss": 1.6483, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.55613642021409e-05, |
|
"loss": 1.6419, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 3.54368932038835e-05, |
|
"loss": 1.689, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.531242220562609e-05, |
|
"loss": 1.7161, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 3.5187951207368684e-05, |
|
"loss": 1.6786, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.506348020911128e-05, |
|
"loss": 1.7053, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.493900921085387e-05, |
|
"loss": 1.6833, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.481453821259646e-05, |
|
"loss": 1.7334, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.469006721433906e-05, |
|
"loss": 1.6493, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.456559621608165e-05, |
|
"loss": 1.7013, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4441125217824246e-05, |
|
"loss": 1.6501, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.431665421956684e-05, |
|
"loss": 1.6614, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.4192183221309436e-05, |
|
"loss": 1.7156, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.406771222305203e-05, |
|
"loss": 1.691, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 3.3943241224794625e-05, |
|
"loss": 1.66, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.3818770226537214e-05, |
|
"loss": 1.6602, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.3694299228279815e-05, |
|
"loss": 1.7051, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.35698282300224e-05, |
|
"loss": 1.6677, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3445357231765e-05, |
|
"loss": 1.6712, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 3.332088623350759e-05, |
|
"loss": 1.6599, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.319641523525019e-05, |
|
"loss": 1.6524, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.307194423699278e-05, |
|
"loss": 1.662, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.294747323873538e-05, |
|
"loss": 1.6652, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.2823002240477966e-05, |
|
"loss": 1.6661, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.269853124222057e-05, |
|
"loss": 1.6812, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.2574060243963156e-05, |
|
"loss": 1.6632, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.244958924570575e-05, |
|
"loss": 1.6527, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.2325118247448345e-05, |
|
"loss": 1.694, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.220064724919094e-05, |
|
"loss": 1.6933, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.2076176250933535e-05, |
|
"loss": 1.671, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.195170525267613e-05, |
|
"loss": 1.6708, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.182723425441872e-05, |
|
"loss": 1.6674, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.170276325616132e-05, |
|
"loss": 1.7056, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.157829225790391e-05, |
|
"loss": 1.7015, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.14538212596465e-05, |
|
"loss": 1.6856, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.13293502613891e-05, |
|
"loss": 1.6423, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.120487926313169e-05, |
|
"loss": 1.6972, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.108040826487428e-05, |
|
"loss": 1.6977, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.095593726661688e-05, |
|
"loss": 1.6715, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.083146626835947e-05, |
|
"loss": 1.7027, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.0706995270102065e-05, |
|
"loss": 1.6648, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.058252427184466e-05, |
|
"loss": 1.7031, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.0458053273587255e-05, |
|
"loss": 1.6848, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.0333582275329846e-05, |
|
"loss": 1.634, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0209111277072444e-05, |
|
"loss": 1.6592, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.0084640278815036e-05, |
|
"loss": 1.6245, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 2.996016928055763e-05, |
|
"loss": 1.6829, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 2.9835698282300222e-05, |
|
"loss": 1.7055, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.971122728404282e-05, |
|
"loss": 1.6517, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 2.9586756285785412e-05, |
|
"loss": 1.6797, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 2.9462285287528007e-05, |
|
"loss": 1.6287, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 2.93378142892706e-05, |
|
"loss": 1.6601, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9213343291013197e-05, |
|
"loss": 1.6898, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 2.9088872292755788e-05, |
|
"loss": 1.655, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 2.8964401294498383e-05, |
|
"loss": 1.6495, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 2.8839930296240975e-05, |
|
"loss": 1.7093, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.8715459297983573e-05, |
|
"loss": 1.6713, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 2.859098829972616e-05, |
|
"loss": 1.6296, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 2.846651730146876e-05, |
|
"loss": 1.6881, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 2.834204630321135e-05, |
|
"loss": 1.6496, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8217575304953945e-05, |
|
"loss": 1.6484, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.8093104306696544e-05, |
|
"loss": 1.668, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.7968633308439135e-05, |
|
"loss": 1.6985, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.784416231018173e-05, |
|
"loss": 1.6479, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.771969131192432e-05, |
|
"loss": 1.6658, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.759522031366692e-05, |
|
"loss": 1.6707, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.747074931540951e-05, |
|
"loss": 1.6648, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.7346278317152106e-05, |
|
"loss": 1.6411, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7221807318894698e-05, |
|
"loss": 1.6912, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.7097336320637296e-05, |
|
"loss": 1.6905, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.6972865322379887e-05, |
|
"loss": 1.6342, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.6848394324122482e-05, |
|
"loss": 1.6899, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6723923325865074e-05, |
|
"loss": 1.6817, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.6599452327607672e-05, |
|
"loss": 1.6394, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.6474981329350264e-05, |
|
"loss": 1.6723, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.635051033109286e-05, |
|
"loss": 1.6732, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.622603933283545e-05, |
|
"loss": 1.6735, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6101568334578048e-05, |
|
"loss": 1.6764, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.597709733632064e-05, |
|
"loss": 1.6838, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.5852626338063234e-05, |
|
"loss": 1.7062, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5728155339805826e-05, |
|
"loss": 1.7033, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5603684341548424e-05, |
|
"loss": 1.6562, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5479213343291016e-05, |
|
"loss": 1.6796, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.535474234503361e-05, |
|
"loss": 1.6471, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5230271346776202e-05, |
|
"loss": 1.6678, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.51058003485188e-05, |
|
"loss": 1.6625, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.498132935026139e-05, |
|
"loss": 1.6801, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4856858352003983e-05, |
|
"loss": 1.6805, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4732387353746578e-05, |
|
"loss": 1.6365, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4607916355489173e-05, |
|
"loss": 1.6771, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4483445357231764e-05, |
|
"loss": 1.6191, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.435897435897436e-05, |
|
"loss": 1.6861, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.4234503360716954e-05, |
|
"loss": 1.6547, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.411003236245955e-05, |
|
"loss": 1.6819, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.398556136420214e-05, |
|
"loss": 1.6607, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3861090365944735e-05, |
|
"loss": 1.6664, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.373661936768733e-05, |
|
"loss": 1.6898, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.3612148369429922e-05, |
|
"loss": 1.647, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.3487677371172517e-05, |
|
"loss": 1.6634, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.336320637291511e-05, |
|
"loss": 1.6276, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3238735374657706e-05, |
|
"loss": 1.6794, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.3114264376400298e-05, |
|
"loss": 1.6414, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.2989793378142893e-05, |
|
"loss": 1.6788, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.2865322379885488e-05, |
|
"loss": 1.6768, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2740851381628083e-05, |
|
"loss": 1.6426, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.2616380383370674e-05, |
|
"loss": 1.661, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.249190938511327e-05, |
|
"loss": 1.6499, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.2367438386855864e-05, |
|
"loss": 1.6848, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.2242967388598455e-05, |
|
"loss": 1.6806, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.211849639034105e-05, |
|
"loss": 1.6611, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.1994025392083645e-05, |
|
"loss": 1.663, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.186955439382624e-05, |
|
"loss": 1.667, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.174508339556883e-05, |
|
"loss": 1.6528, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.1620612397311426e-05, |
|
"loss": 1.7216, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.149614139905402e-05, |
|
"loss": 1.6693, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.1371670400796616e-05, |
|
"loss": 1.6344, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1247199402539207e-05, |
|
"loss": 1.697, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.1122728404281802e-05, |
|
"loss": 1.6922, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.0998257406024397e-05, |
|
"loss": 1.6451, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.0873786407766992e-05, |
|
"loss": 1.6365, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.0749315409509583e-05, |
|
"loss": 1.6809, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.062484441125218e-05, |
|
"loss": 1.6722, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.0500373412994773e-05, |
|
"loss": 1.6714, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.0375902414737365e-05, |
|
"loss": 1.6953, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.025143141647996e-05, |
|
"loss": 1.6722, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.0126960418222554e-05, |
|
"loss": 1.6553, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.000248941996515e-05, |
|
"loss": 1.6763, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.987801842170774e-05, |
|
"loss": 1.658, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 1.9753547423450336e-05, |
|
"loss": 1.6198, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 1.962907642519293e-05, |
|
"loss": 1.6644, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 1.9504605426935525e-05, |
|
"loss": 1.6557, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9380134428678117e-05, |
|
"loss": 1.6391, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 1.9255663430420712e-05, |
|
"loss": 1.6471, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 1.9131192432163307e-05, |
|
"loss": 1.6517, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 1.9006721433905898e-05, |
|
"loss": 1.6794, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8882250435648493e-05, |
|
"loss": 1.6582, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 1.8757779437391088e-05, |
|
"loss": 1.6731, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 1.8633308439133683e-05, |
|
"loss": 1.6154, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 1.8508837440876274e-05, |
|
"loss": 1.7092, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.838436644261887e-05, |
|
"loss": 1.6794, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.8259895444361464e-05, |
|
"loss": 1.7007, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.813542444610406e-05, |
|
"loss": 1.6928, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.801095344784665e-05, |
|
"loss": 1.7, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.7886482449589245e-05, |
|
"loss": 1.6848, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.776201145133184e-05, |
|
"loss": 1.6527, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.763754045307443e-05, |
|
"loss": 1.6382, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.7513069454817026e-05, |
|
"loss": 1.6832, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7388598456559625e-05, |
|
"loss": 1.6851, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.7264127458302216e-05, |
|
"loss": 1.6874, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.713965646004481e-05, |
|
"loss": 1.688, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.7015185461787406e-05, |
|
"loss": 1.7066, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.689071446353e-05, |
|
"loss": 1.6736, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.6766243465272592e-05, |
|
"loss": 1.6535, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.6641772467015187e-05, |
|
"loss": 1.6806, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.6517301468757782e-05, |
|
"loss": 1.6704, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.6392830470500377e-05, |
|
"loss": 1.6722, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.626835947224297e-05, |
|
"loss": 1.683, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.6143888473985563e-05, |
|
"loss": 1.7067, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.6019417475728158e-05, |
|
"loss": 1.6583, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5894946477470753e-05, |
|
"loss": 1.6634, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.5770475479213344e-05, |
|
"loss": 1.661, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.564600448095594e-05, |
|
"loss": 1.6684, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.5521533482698534e-05, |
|
"loss": 1.6578, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.5397062484441126e-05, |
|
"loss": 1.6789, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.527259148618372e-05, |
|
"loss": 1.6399, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.5148120487926315e-05, |
|
"loss": 1.6568, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.5023649489668909e-05, |
|
"loss": 1.6593, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4899178491411503e-05, |
|
"loss": 1.6826, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.4774707493154097e-05, |
|
"loss": 1.6472, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.4650236494896691e-05, |
|
"loss": 1.6399, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.4525765496639285e-05, |
|
"loss": 1.6552, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.440129449838188e-05, |
|
"loss": 1.6616, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.4276823500124473e-05, |
|
"loss": 1.6618, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.4152352501867066e-05, |
|
"loss": 1.6891, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.402788150360966e-05, |
|
"loss": 1.6816, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3903410505352254e-05, |
|
"loss": 1.6268, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.3778939507094849e-05, |
|
"loss": 1.6939, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.3654468508837442e-05, |
|
"loss": 1.6795, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.3529997510580037e-05, |
|
"loss": 1.6547, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.340552651232263e-05, |
|
"loss": 1.6495, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.3281055514065225e-05, |
|
"loss": 1.6395, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.3156584515807818e-05, |
|
"loss": 1.679, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.3032113517550413e-05, |
|
"loss": 1.6671, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.2907642519293006e-05, |
|
"loss": 1.6423, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.27831715210356e-05, |
|
"loss": 1.6865, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.2658700522778194e-05, |
|
"loss": 1.6774, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.2534229524520787e-05, |
|
"loss": 1.677, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.240975852626338e-05, |
|
"loss": 1.6595, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.2285287528005974e-05, |
|
"loss": 1.6902, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.2160816529748569e-05, |
|
"loss": 1.641, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.2036345531491162e-05, |
|
"loss": 1.6345, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1911874533233758e-05, |
|
"loss": 1.6671, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.1787403534976351e-05, |
|
"loss": 1.6612, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.1662932536718946e-05, |
|
"loss": 1.7066, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.153846153846154e-05, |
|
"loss": 1.6706, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1413990540204134e-05, |
|
"loss": 1.6684, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.1289519541946728e-05, |
|
"loss": 1.6837, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.116504854368932e-05, |
|
"loss": 1.6633, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.1040577545431916e-05, |
|
"loss": 1.6636, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0916106547174509e-05, |
|
"loss": 1.6492, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.0791635548917104e-05, |
|
"loss": 1.657, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.0667164550659697e-05, |
|
"loss": 1.6983, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0542693552402292e-05, |
|
"loss": 1.6567, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.0418222554144885e-05, |
|
"loss": 1.6906, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.029375155588748e-05, |
|
"loss": 1.6763, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.0169280557630073e-05, |
|
"loss": 1.6493, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.0044809559372668e-05, |
|
"loss": 1.6782, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 9.920338561115261e-06, |
|
"loss": 1.6419, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.795867562857854e-06, |
|
"loss": 1.6545, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 9.671396564600449e-06, |
|
"loss": 1.6938, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.546925566343042e-06, |
|
"loss": 1.6703, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 9.422454568085637e-06, |
|
"loss": 1.6446, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.29798356982823e-06, |
|
"loss": 1.6748, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 9.173512571570825e-06, |
|
"loss": 1.6677, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.049041573313418e-06, |
|
"loss": 1.6915, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 8.924570575056013e-06, |
|
"loss": 1.7075, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 8.800099576798606e-06, |
|
"loss": 1.6725, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.675628578541201e-06, |
|
"loss": 1.628, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.551157580283794e-06, |
|
"loss": 1.6319, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 8.42668658202639e-06, |
|
"loss": 1.6693, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 8.302215583768982e-06, |
|
"loss": 1.6116, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 8.177744585511576e-06, |
|
"loss": 1.6727, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.05327358725417e-06, |
|
"loss": 1.6661, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 7.928802588996764e-06, |
|
"loss": 1.6524, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.804331590739359e-06, |
|
"loss": 1.6712, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 7.679860592481952e-06, |
|
"loss": 1.635, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.555389594224547e-06, |
|
"loss": 1.6886, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.43091859596714e-06, |
|
"loss": 1.6956, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 7.306447597709734e-06, |
|
"loss": 1.6449, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.181976599452328e-06, |
|
"loss": 1.6394, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 7.057505601194922e-06, |
|
"loss": 1.6363, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 6.933034602937516e-06, |
|
"loss": 1.6478, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 6.80856360468011e-06, |
|
"loss": 1.6328, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 6.684092606422704e-06, |
|
"loss": 1.6792, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.559621608165298e-06, |
|
"loss": 1.6686, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 6.435150609907892e-06, |
|
"loss": 1.6756, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 6.310679611650486e-06, |
|
"loss": 1.6516, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 6.18620861339308e-06, |
|
"loss": 1.6629, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 6.061737615135674e-06, |
|
"loss": 1.6575, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 5.937266616878267e-06, |
|
"loss": 1.6632, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 5.812795618620861e-06, |
|
"loss": 1.6579, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 5.688324620363455e-06, |
|
"loss": 1.6385, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.563853622106049e-06, |
|
"loss": 1.6282, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 5.439382623848643e-06, |
|
"loss": 1.6828, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.314911625591237e-06, |
|
"loss": 1.6922, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.190440627333831e-06, |
|
"loss": 1.6483, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.065969629076425e-06, |
|
"loss": 1.6461, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 4.941498630819019e-06, |
|
"loss": 1.6797, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.817027632561613e-06, |
|
"loss": 1.6634, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6925566343042074e-06, |
|
"loss": 1.6219, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.5680856360468015e-06, |
|
"loss": 1.6487, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.443614637789395e-06, |
|
"loss": 1.6446, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.319143639531989e-06, |
|
"loss": 1.5842, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 4.194672641274583e-06, |
|
"loss": 1.6766, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 4.070201643017177e-06, |
|
"loss": 1.6462, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.945730644759771e-06, |
|
"loss": 1.6763, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.821259646502365e-06, |
|
"loss": 1.6302, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 3.6967886482449596e-06, |
|
"loss": 1.6636, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.5723176499875532e-06, |
|
"loss": 1.6753, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 3.4478466517301472e-06, |
|
"loss": 1.6394, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 3.3233756534727413e-06, |
|
"loss": 1.6746, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 3.1989046552153353e-06, |
|
"loss": 1.6583, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.074433656957929e-06, |
|
"loss": 1.6619, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 2.949962658700523e-06, |
|
"loss": 1.7049, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 2.825491660443117e-06, |
|
"loss": 1.66, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 2.7010206621857105e-06, |
|
"loss": 1.6295, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.5765496639283046e-06, |
|
"loss": 1.6872, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.4520786656708986e-06, |
|
"loss": 1.6316, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 2.327607667413493e-06, |
|
"loss": 1.6652, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.203136669156087e-06, |
|
"loss": 1.6687, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.0786656708986807e-06, |
|
"loss": 1.6639, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 1.9541946726412747e-06, |
|
"loss": 1.6429, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.8297236743838687e-06, |
|
"loss": 1.6682, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 1.7052526761264627e-06, |
|
"loss": 1.6767, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.5807816778690567e-06, |
|
"loss": 1.6809, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.4563106796116506e-06, |
|
"loss": 1.658, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.3318396813542446e-06, |
|
"loss": 1.681, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.2073686830968386e-06, |
|
"loss": 1.6621, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.0828976848394324e-06, |
|
"loss": 1.6433, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 9.584266865820264e-07, |
|
"loss": 1.6748, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 8.339556883246204e-07, |
|
"loss": 1.6733, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.094846900672144e-07, |
|
"loss": 1.6509, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 5.850136918098084e-07, |
|
"loss": 1.6387, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.605426935524023e-07, |
|
"loss": 1.6497, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.360716952949963e-07, |
|
"loss": 1.6554, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.1160069703759027e-07, |
|
"loss": 1.6596, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4017, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"total_flos": 2.52809928769536e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
}