{
  "best_metric": 2.158191204071045,
  "best_model_checkpoint": "outputs/checkpoint-34",
  "epoch": 0.974910394265233,
  "eval_steps": 500,
  "global_step": 34,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.5734767025089605,
      "grad_norm": 1.373448371887207,
      "learning_rate": 2e-05,
      "loss": 2.8781,
      "step": 20
    },
    {
      "epoch": 0.974910394265233,
      "eval_loss": 2.158191204071045,
      "eval_runtime": 90.2612,
      "eval_samples_per_second": 12.364,
      "eval_steps_per_second": 1.551,
      "step": 34
    }
  ],
  "logging_steps": 20,
  "max_steps": 170,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 1843618074722304.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}