{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.22117777163395078,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 4.9999895164082156e-05,
      "loss": 1.6349,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999958065720787e-05,
      "loss": 1.6199,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999905648201487e-05,
      "loss": 1.4834,
      "step": 15
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999832264289934e-05,
      "loss": 1.3882,
      "step": 20
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999737914601591e-05,
      "loss": 1.3679,
      "step": 25
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999622599927756e-05,
      "loss": 1.2396,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999486321235559e-05,
      "loss": 1.321,
      "step": 35
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9993290796679516e-05,
      "loss": 1.2874,
      "step": 40
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999150876543699e-05,
      "loss": 1.2607,
      "step": 45
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9989517133573694e-05,
      "loss": 1.2454,
      "step": 50
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9987315917793174e-05,
      "loss": 1.2799,
      "step": 55
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998490513655676e-05,
      "loss": 1.2575,
      "step": 60
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998228481008337e-05,
      "loss": 1.2404,
      "step": 65
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.997945496034934e-05,
      "loss": 1.2219,
      "step": 70
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9976415611088267e-05,
      "loss": 1.2241,
      "step": 75
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.997316678779079e-05,
      "loss": 1.1716,
      "step": 80
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996970851770438e-05,
      "loss": 1.1883,
      "step": 85
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9966040829833115e-05,
      "loss": 1.205,
      "step": 90
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9962163754937426e-05,
      "loss": 1.1246,
      "step": 95
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.995807732553384e-05,
      "loss": 1.1636,
      "step": 100
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9953781575894723e-05,
      "loss": 1.158,
      "step": 105
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9949276542048e-05,
      "loss": 1.1477,
      "step": 110
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9944562261776805e-05,
      "loss": 1.1678,
      "step": 115
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9939638774619216e-05,
      "loss": 1.1501,
      "step": 120
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.99345061218679e-05,
      "loss": 1.1955,
      "step": 125
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9929164346569756e-05,
      "loss": 1.1724,
      "step": 130
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9923613493525576e-05,
      "loss": 1.177,
      "step": 135
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991785360928968e-05,
      "loss": 1.1418,
      "step": 140
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991188474216947e-05,
      "loss": 1.1898,
      "step": 145
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9905706942225094e-05,
      "loss": 1.1479,
      "step": 150
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9899320261268966e-05,
      "loss": 1.1356,
      "step": 155
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.989272475286537e-05,
      "loss": 1.1397,
      "step": 160
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9885920472330004e-05,
      "loss": 1.1215,
      "step": 165
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9878907476729516e-05,
      "loss": 1.167,
      "step": 170
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9871685824881e-05,
      "loss": 1.1219,
      "step": 175
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9864255577351534e-05,
      "loss": 1.0835,
      "step": 180
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.985661679645769e-05,
      "loss": 1.0721,
      "step": 185
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9848769546264915e-05,
      "loss": 1.0692,
      "step": 190
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9840713892587146e-05,
      "loss": 1.0488,
      "step": 195
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.983244990298609e-05,
      "loss": 1.1285,
      "step": 200
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.982397764677081e-05,
      "loss": 1.0832,
      "step": 205
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.981529719499704e-05,
      "loss": 1.0652,
      "step": 210
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.980640862046663e-05,
      "loss": 1.1043,
      "step": 215
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.979731199772693e-05,
      "loss": 1.112,
      "step": 220
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9788007403070146e-05,
      "loss": 1.1029,
      "step": 225
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.977849491453277e-05,
      "loss": 1.0869,
      "step": 230
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.976877461189481e-05,
      "loss": 1.0843,
      "step": 235
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.975884657667922e-05,
      "loss": 1.0789,
      "step": 240
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.974871089215118e-05,
      "loss": 1.0449,
      "step": 245
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9738367643317405e-05,
      "loss": 1.1053,
      "step": 250
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9727816916925395e-05,
      "loss": 1.0651,
      "step": 255
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.971705880146276e-05,
      "loss": 1.0828,
      "step": 260
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.970609338715646e-05,
      "loss": 1.0932,
      "step": 265
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.969492076597203e-05,
      "loss": 1.0648,
      "step": 270
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.968354103161283e-05,
      "loss": 1.0948,
      "step": 275
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.967195427951926e-05,
      "loss": 1.0721,
      "step": 280
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9660160606867936e-05,
      "loss": 1.124,
      "step": 285
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9648160112570896e-05,
      "loss": 1.0963,
      "step": 290
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9635952897274773e-05,
      "loss": 1.1078,
      "step": 295
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9623539063359925e-05,
      "loss": 1.1059,
      "step": 300
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.961091871493962e-05,
      "loss": 1.1032,
      "step": 305
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.959809195785912e-05,
      "loss": 1.0595,
      "step": 310
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.958505889969481e-05,
      "loss": 1.1096,
      "step": 315
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.957181964975329e-05,
      "loss": 1.0589,
      "step": 320
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.955837431907049e-05,
      "loss": 1.0608,
      "step": 325
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.954472302041069e-05,
      "loss": 1.0819,
      "step": 330
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9530865868265605e-05,
      "loss": 1.0759,
      "step": 335
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.951680297885342e-05,
      "loss": 1.0515,
      "step": 340
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.950253447011779e-05,
      "loss": 1.0371,
      "step": 345
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.948806046172691e-05,
      "loss": 1.0619,
      "step": 350
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.947338107507245e-05,
      "loss": 1.0757,
      "step": 355
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.945849643326857e-05,
      "loss": 1.0686,
      "step": 360
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9443406661150874e-05,
      "loss": 1.0809,
      "step": 365
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.942811188527537e-05,
      "loss": 1.0704,
      "step": 370
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.941261223391742e-05,
      "loss": 1.0655,
      "step": 375
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.939690783707063e-05,
      "loss": 1.1182,
      "step": 380
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.938099882644578e-05,
      "loss": 1.081,
      "step": 385
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9364885335469734e-05,
      "loss": 1.0792,
      "step": 390
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.93485674992843e-05,
      "loss": 1.0244,
      "step": 395
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.933204545474511e-05,
      "loss": 1.0531,
      "step": 400
    }
  ],
  "max_steps": 5424,
  "num_train_epochs": 3,
  "total_flos": 1.29988124737536e+17,
  "trial_name": null,
  "trial_params": null
}