{
  "best_metric": 0.9334564208984375,
  "best_model_checkpoint": "outputs/checkpoint-138",
  "epoch": 2.979757085020243,
  "eval_steps": 500,
  "global_step": 138,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.4318488529014845,
      "grad_norm": 0.5840024948120117,
      "learning_rate": 4e-05,
      "loss": 1.9547,
      "step": 20
    },
    {
      "epoch": 0.863697705802969,
      "grad_norm": 0.4871944487094879,
      "learning_rate": 8e-05,
      "loss": 1.8203,
      "step": 40
    },
    {
      "epoch": 0.9932523616734144,
      "eval_loss": 1.5045615434646606,
      "eval_runtime": 13.3054,
      "eval_samples_per_second": 27.959,
      "eval_steps_per_second": 3.532,
      "step": 46
    },
    {
      "epoch": 1.2955465587044535,
      "grad_norm": 0.6788378953933716,
      "learning_rate": 0.00012,
      "loss": 1.6913,
      "step": 60
    },
    {
      "epoch": 1.7273954116059378,
      "grad_norm": 0.8430602550506592,
      "learning_rate": 0.00016,
      "loss": 1.4654,
      "step": 80
    },
    {
      "epoch": 1.9865047233468287,
      "eval_loss": 1.10453200340271,
      "eval_runtime": 13.2625,
      "eval_samples_per_second": 28.049,
      "eval_steps_per_second": 3.544,
      "step": 92
    },
    {
      "epoch": 2.1592442645074224,
      "grad_norm": 1.1823046207427979,
      "learning_rate": 0.0002,
      "loss": 1.3454,
      "step": 100
    },
    {
      "epoch": 2.591093117408907,
      "grad_norm": 1.2263983488082886,
      "learning_rate": 0.00019957591414302984,
      "loss": 1.1674,
      "step": 120
    },
    {
      "epoch": 2.979757085020243,
      "eval_loss": 0.9334564208984375,
      "eval_runtime": 13.257,
      "eval_samples_per_second": 28.061,
      "eval_steps_per_second": 3.545,
      "step": 138
    }
  ],
  "logging_steps": 20,
  "max_steps": 782,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 17,
  "save_steps": 500,
  "total_flos": 1.0633812858433536e+16,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}