{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.1202389843166543,
  "eval_steps": 500,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 4.98755290017426e-05,
      "loss": 1.789,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.975105800348519e-05,
      "loss": 1.7868,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.962658700522779e-05,
      "loss": 1.7871,
      "step": 30
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9502116006970375e-05,
      "loss": 1.7554,
      "step": 40
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.937764500871298e-05,
      "loss": 1.6774,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9253174010455565e-05,
      "loss": 1.7076,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.912870301219816e-05,
      "loss": 1.6946,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9004232013940755e-05,
      "loss": 1.7129,
      "step": 80
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.887976101568335e-05,
      "loss": 1.7002,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8755290017425944e-05,
      "loss": 1.7244,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.863081901916854e-05,
      "loss": 1.7187,
      "step": 110
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.850634802091113e-05,
      "loss": 1.7207,
      "step": 120
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.838187702265373e-05,
      "loss": 1.7349,
      "step": 130
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.825740602439632e-05,
      "loss": 1.7021,
      "step": 140
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.813293502613891e-05,
      "loss": 1.6658,
      "step": 150
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.800846402788151e-05,
      "loss": 1.6851,
      "step": 160
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.78839930296241e-05,
      "loss": 1.6748,
      "step": 170
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.7759522031366697e-05,
      "loss": 1.6882,
      "step": 180
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.763505103310929e-05,
      "loss": 1.6568,
      "step": 190
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.751058003485188e-05,
      "loss": 1.7129,
      "step": 200
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.738610903659448e-05,
      "loss": 1.71,
      "step": 210
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.726163803833707e-05,
      "loss": 1.728,
      "step": 220
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.7137167040079664e-05,
      "loss": 1.7038,
      "step": 230
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.701269604182226e-05,
      "loss": 1.6916,
      "step": 240
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.6888225043564854e-05,
      "loss": 1.6826,
      "step": 250
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.676375404530744e-05,
      "loss": 1.7249,
      "step": 260
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.6639283047050044e-05,
      "loss": 1.6766,
      "step": 270
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.651481204879263e-05,
      "loss": 1.6795,
      "step": 280
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.6390341050535227e-05,
      "loss": 1.7304,
      "step": 290
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.626587005227782e-05,
      "loss": 1.747,
      "step": 300
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.6141399054020416e-05,
      "loss": 1.6345,
      "step": 310
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.601692805576301e-05,
      "loss": 1.6762,
      "step": 320
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5892457057505606e-05,
      "loss": 1.6601,
      "step": 330
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.5767986059248194e-05,
      "loss": 1.6635,
      "step": 340
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.5643515060990796e-05,
      "loss": 1.6952,
      "step": 350
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.5519044062733384e-05,
      "loss": 1.683,
      "step": 360
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.539457306447598e-05,
      "loss": 1.657,
      "step": 370
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.5270102066218574e-05,
      "loss": 1.6471,
      "step": 380
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.514563106796117e-05,
      "loss": 1.6852,
      "step": 390
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.502116006970376e-05,
      "loss": 1.6737,
      "step": 400
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.489668907144636e-05,
      "loss": 1.6938,
      "step": 410
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.4772218073188946e-05,
      "loss": 1.6716,
      "step": 420
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.464774707493155e-05,
      "loss": 1.6678,
      "step": 430
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.4523276076674136e-05,
      "loss": 1.6229,
      "step": 440
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.439880507841673e-05,
      "loss": 1.659,
      "step": 450
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.4274334080159326e-05,
      "loss": 1.6842,
      "step": 460
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.414986308190192e-05,
      "loss": 1.6751,
      "step": 470
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.4025392083644516e-05,
      "loss": 1.6745,
      "step": 480
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.390092108538711e-05,
      "loss": 1.6791,
      "step": 490
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.37764500871297e-05,
      "loss": 1.6958,
      "step": 500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.36519790888723e-05,
      "loss": 1.6891,
      "step": 510
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.352750809061489e-05,
      "loss": 1.6732,
      "step": 520
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.340303709235748e-05,
      "loss": 1.6965,
      "step": 530
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.327856609410008e-05,
      "loss": 1.6567,
      "step": 540
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.315409509584267e-05,
      "loss": 1.6773,
      "step": 550
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.302962409758526e-05,
      "loss": 1.7101,
      "step": 560
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.290515309932786e-05,
      "loss": 1.6942,
      "step": 570
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.278068210107045e-05,
      "loss": 1.6925,
      "step": 580
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.2656211102813046e-05,
      "loss": 1.6626,
      "step": 590
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.253174010455564e-05,
      "loss": 1.7122,
      "step": 600
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.2407269106298235e-05,
      "loss": 1.6773,
      "step": 610
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.228279810804083e-05,
      "loss": 1.6429,
      "step": 620
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.2158327109783425e-05,
      "loss": 1.6471,
      "step": 630
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.203385611152601e-05,
      "loss": 1.6818,
      "step": 640
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.1909385113268615e-05,
      "loss": 1.6826,
      "step": 650
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.17849141150112e-05,
      "loss": 1.6817,
      "step": 660
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.16604431167538e-05,
      "loss": 1.7324,
      "step": 670
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.153597211849639e-05,
      "loss": 1.7007,
      "step": 680
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.141150112023899e-05,
      "loss": 1.6553,
      "step": 690
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.128703012198158e-05,
      "loss": 1.6715,
      "step": 700
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.116255912372418e-05,
      "loss": 1.6409,
      "step": 710
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.1038088125466765e-05,
      "loss": 1.7091,
      "step": 720
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.091361712720937e-05,
      "loss": 1.7258,
      "step": 730
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.0789146128951955e-05,
      "loss": 1.6653,
      "step": 740
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.066467513069455e-05,
      "loss": 1.7244,
      "step": 750
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.0540204132437145e-05,
      "loss": 1.6709,
      "step": 760
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.041573313417974e-05,
      "loss": 1.6641,
      "step": 770
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.029126213592233e-05,
      "loss": 1.6804,
      "step": 780
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.016679113766493e-05,
      "loss": 1.6868,
      "step": 790
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.004232013940752e-05,
      "loss": 1.6939,
      "step": 800
    },
    {
      "epoch": 0.6,
      "learning_rate": 3.991784914115011e-05,
      "loss": 1.6804,
      "step": 810
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.979337814289271e-05,
      "loss": 1.6861,
      "step": 820
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.96689071446353e-05,
      "loss": 1.6643,
      "step": 830
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.95444361463779e-05,
      "loss": 1.6765,
      "step": 840
    },
    {
      "epoch": 0.63,
      "learning_rate": 3.941996514812049e-05,
      "loss": 1.6819,
      "step": 850
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.929549414986308e-05,
      "loss": 1.6677,
      "step": 860
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.917102315160568e-05,
      "loss": 1.6847,
      "step": 870
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.904655215334827e-05,
      "loss": 1.608,
      "step": 880
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.8922081155090865e-05,
      "loss": 1.6625,
      "step": 890
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.879761015683346e-05,
      "loss": 1.6619,
      "step": 900
    },
    {
      "epoch": 0.68,
      "learning_rate": 3.8673139158576054e-05,
      "loss": 1.7024,
      "step": 910
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.854866816031865e-05,
      "loss": 1.73,
      "step": 920
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.8424197162061244e-05,
      "loss": 1.6909,
      "step": 930
    },
    {
      "epoch": 0.7,
      "learning_rate": 3.829972616380383e-05,
      "loss": 1.7046,
      "step": 940
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.8175255165546434e-05,
      "loss": 1.666,
      "step": 950
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.805078416728902e-05,
      "loss": 1.6979,
      "step": 960
    },
    {
      "epoch": 0.72,
      "learning_rate": 3.792631316903162e-05,
      "loss": 1.6619,
      "step": 970
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.780184217077421e-05,
      "loss": 1.7073,
      "step": 980
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.7677371172516806e-05,
      "loss": 1.6487,
      "step": 990
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7552900174259395e-05,
      "loss": 1.6706,
      "step": 1000
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.7428429176001996e-05,
      "loss": 1.7042,
      "step": 1010
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.7303958177744584e-05,
      "loss": 1.6835,
      "step": 1020
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.717948717948718e-05,
      "loss": 1.668,
      "step": 1030
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.7055016181229774e-05,
      "loss": 1.6358,
      "step": 1040
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.693054518297237e-05,
      "loss": 1.6796,
      "step": 1050
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.6806074184714964e-05,
      "loss": 1.6369,
      "step": 1060
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.668160318645756e-05,
      "loss": 1.6751,
      "step": 1070
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.655713218820015e-05,
      "loss": 1.7043,
      "step": 1080
    },
    {
      "epoch": 0.81,
      "learning_rate": 3.643266118994275e-05,
      "loss": 1.7065,
      "step": 1090
    },
    {
      "epoch": 0.82,
      "learning_rate": 3.6308190191685337e-05,
      "loss": 1.6562,
      "step": 1100
    },
    {
      "epoch": 0.83,
      "learning_rate": 3.618371919342793e-05,
      "loss": 1.6445,
      "step": 1110
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.6059248195170526e-05,
      "loss": 1.6906,
      "step": 1120
    },
    {
      "epoch": 0.84,
      "learning_rate": 3.593477719691312e-05,
      "loss": 1.6744,
      "step": 1130
    },
    {
      "epoch": 0.85,
      "learning_rate": 3.5810306198655716e-05,
      "loss": 1.6557,
      "step": 1140
    },
    {
      "epoch": 0.86,
      "learning_rate": 3.568583520039831e-05,
      "loss": 1.6482,
      "step": 1150
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.55613642021409e-05,
      "loss": 1.6418,
      "step": 1160
    },
    {
      "epoch": 0.87,
      "learning_rate": 3.54368932038835e-05,
      "loss": 1.6888,
      "step": 1170
    },
    {
      "epoch": 0.88,
      "learning_rate": 3.531242220562609e-05,
      "loss": 1.7157,
      "step": 1180
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5187951207368684e-05,
      "loss": 1.6781,
      "step": 1190
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.506348020911128e-05,
      "loss": 1.705,
      "step": 1200
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.493900921085387e-05,
      "loss": 1.6837,
      "step": 1210
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.481453821259646e-05,
      "loss": 1.7337,
      "step": 1220
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.469006721433906e-05,
      "loss": 1.6492,
      "step": 1230
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.456559621608165e-05,
      "loss": 1.7011,
      "step": 1240
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.4441125217824246e-05,
      "loss": 1.6501,
      "step": 1250
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.431665421956684e-05,
      "loss": 1.6614,
      "step": 1260
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.4192183221309436e-05,
      "loss": 1.7155,
      "step": 1270
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.406771222305203e-05,
      "loss": 1.6906,
      "step": 1280
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.3943241224794625e-05,
      "loss": 1.6602,
      "step": 1290
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.3818770226537214e-05,
      "loss": 1.66,
      "step": 1300
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.3694299228279815e-05,
      "loss": 1.7053,
      "step": 1310
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.35698282300224e-05,
      "loss": 1.6676,
      "step": 1320
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.3445357231765e-05,
      "loss": 1.6714,
      "step": 1330
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.332088623350759e-05,
      "loss": 1.6601,
      "step": 1340
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.319641523525019e-05,
      "loss": 1.6518,
      "step": 1350
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.307194423699278e-05,
      "loss": 1.6621,
      "step": 1360
    },
    {
      "epoch": 1.02,
      "learning_rate": 3.294747323873538e-05,
      "loss": 1.6654,
      "step": 1370
    },
    {
      "epoch": 1.03,
      "learning_rate": 3.2823002240477966e-05,
      "loss": 1.6664,
      "step": 1380
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.269853124222057e-05,
      "loss": 1.6809,
      "step": 1390
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.2574060243963156e-05,
      "loss": 1.6632,
      "step": 1400
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.244958924570575e-05,
      "loss": 1.6527,
      "step": 1410
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.2325118247448345e-05,
      "loss": 1.6937,
      "step": 1420
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.220064724919094e-05,
      "loss": 1.6931,
      "step": 1430
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.2076176250933535e-05,
      "loss": 1.6712,
      "step": 1440
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.195170525267613e-05,
      "loss": 1.6707,
      "step": 1450
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.182723425441872e-05,
      "loss": 1.6674,
      "step": 1460
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.170276325616132e-05,
      "loss": 1.7057,
      "step": 1470
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.157829225790391e-05,
      "loss": 1.7014,
      "step": 1480
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.14538212596465e-05,
      "loss": 1.6852,
      "step": 1490
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.13293502613891e-05,
      "loss": 1.6425,
      "step": 1500
    }
  ],
  "logging_steps": 10,
  "max_steps": 4017,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 9.4803723288576e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}