{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 50.0,
  "eval_steps": 500,
  "global_step": 17300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.45,
      "learning_rate": 4.855491329479769e-05,
      "loss": 1.6594,
      "step": 500
    },
    {
      "epoch": 2.89,
      "learning_rate": 4.710982658959538e-05,
      "loss": 0.9215,
      "step": 1000
    },
    {
      "epoch": 4.34,
      "learning_rate": 4.566473988439307e-05,
      "loss": 0.6453,
      "step": 1500
    },
    {
      "epoch": 5.78,
      "learning_rate": 4.421965317919075e-05,
      "loss": 0.4758,
      "step": 2000
    },
    {
      "epoch": 7.23,
      "learning_rate": 4.2774566473988445e-05,
      "loss": 0.3575,
      "step": 2500
    },
    {
      "epoch": 8.67,
      "learning_rate": 4.132947976878613e-05,
      "loss": 0.3063,
      "step": 3000
    },
    {
      "epoch": 10.12,
      "learning_rate": 3.988439306358382e-05,
      "loss": 0.31,
      "step": 3500
    },
    {
      "epoch": 11.56,
      "learning_rate": 3.84393063583815e-05,
      "loss": 0.3325,
      "step": 4000
    },
    {
      "epoch": 13.01,
      "learning_rate": 3.699421965317919e-05,
      "loss": 0.414,
      "step": 4500
    },
    {
      "epoch": 14.45,
      "learning_rate": 3.554913294797688e-05,
      "loss": 0.4969,
      "step": 5000
    },
    {
      "epoch": 15.9,
      "learning_rate": 3.410404624277457e-05,
      "loss": 0.668,
      "step": 5500
    },
    {
      "epoch": 17.34,
      "learning_rate": 3.265895953757225e-05,
      "loss": 0.7799,
      "step": 6000
    },
    {
      "epoch": 18.79,
      "learning_rate": 3.1213872832369946e-05,
      "loss": 0.8613,
      "step": 6500
    },
    {
      "epoch": 20.23,
      "learning_rate": 2.9768786127167632e-05,
      "loss": 0.8462,
      "step": 7000
    },
    {
      "epoch": 21.68,
      "learning_rate": 2.832369942196532e-05,
      "loss": 0.7924,
      "step": 7500
    },
    {
      "epoch": 23.12,
      "learning_rate": 2.6878612716763007e-05,
      "loss": 0.7011,
      "step": 8000
    },
    {
      "epoch": 24.57,
      "learning_rate": 2.5433526011560693e-05,
      "loss": 0.5652,
      "step": 8500
    },
    {
      "epoch": 26.01,
      "learning_rate": 2.3988439306358382e-05,
      "loss": 0.4699,
      "step": 9000
    },
    {
      "epoch": 27.46,
      "learning_rate": 2.254335260115607e-05,
      "loss": 0.3833,
      "step": 9500
    },
    {
      "epoch": 28.9,
      "learning_rate": 2.1098265895953757e-05,
      "loss": 0.3351,
      "step": 10000
    },
    {
      "epoch": 30.35,
      "learning_rate": 1.9653179190751446e-05,
      "loss": 0.2929,
      "step": 10500
    },
    {
      "epoch": 31.79,
      "learning_rate": 1.8208092485549132e-05,
      "loss": 0.2636,
      "step": 11000
    },
    {
      "epoch": 33.24,
      "learning_rate": 1.676300578034682e-05,
      "loss": 0.2358,
      "step": 11500
    },
    {
      "epoch": 34.68,
      "learning_rate": 1.531791907514451e-05,
      "loss": 0.2153,
      "step": 12000
    },
    {
      "epoch": 36.13,
      "learning_rate": 1.3872832369942197e-05,
      "loss": 0.1957,
      "step": 12500
    },
    {
      "epoch": 37.57,
      "learning_rate": 1.2427745664739884e-05,
      "loss": 0.1784,
      "step": 13000
    },
    {
      "epoch": 39.02,
      "learning_rate": 1.0982658959537573e-05,
      "loss": 0.169,
      "step": 13500
    },
    {
      "epoch": 40.46,
      "learning_rate": 9.53757225433526e-06,
      "loss": 0.1565,
      "step": 14000
    },
    {
      "epoch": 41.91,
      "learning_rate": 8.092485549132949e-06,
      "loss": 0.1508,
      "step": 14500
    },
    {
      "epoch": 43.35,
      "learning_rate": 6.647398843930635e-06,
      "loss": 0.1412,
      "step": 15000
    },
    {
      "epoch": 44.8,
      "learning_rate": 5.202312138728324e-06,
      "loss": 0.1378,
      "step": 15500
    },
    {
      "epoch": 46.24,
      "learning_rate": 3.757225433526012e-06,
      "loss": 0.1329,
      "step": 16000
    },
    {
      "epoch": 47.69,
      "learning_rate": 2.3121387283236993e-06,
      "loss": 0.131,
      "step": 16500
    },
    {
      "epoch": 49.13,
      "learning_rate": 8.670520231213873e-07,
      "loss": 0.1268,
      "step": 17000
    },
    {
      "epoch": 50.0,
      "step": 17300,
      "total_flos": 4.337225635212288e+17,
      "train_loss": 0.4313709532456591,
      "train_runtime": 11982.9489,
      "train_samples_per_second": 369.375,
      "train_steps_per_second": 1.444
    }
  ],
  "logging_steps": 500,
  "max_steps": 17300,
  "num_train_epochs": 50,
  "save_steps": 500,
  "total_flos": 4.337225635212288e+17,
  "trial_name": null,
  "trial_params": null
}