{
  "best_metric": 0.9142857142857143,
  "best_model_checkpoint": "videomae-base-finetuned-ucf101-subset/checkpoint-225",
  "epoch": 3.25,
  "eval_steps": 500,
  "global_step": 300,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.3558,
      "step": 10
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.293,
      "step": 20
    },
    {
      "epoch": 0.1,
      "learning_rate": 5e-05,
      "loss": 2.3089,
      "step": 30
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.814814814814815e-05,
      "loss": 2.1925,
      "step": 40
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.62962962962963e-05,
      "loss": 1.9868,
      "step": 50
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 2.0795,
      "step": 60
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.259259259259259e-05,
      "loss": 1.835,
      "step": 70
    },
    {
      "epoch": 0.25,
      "eval_accuracy": 0.4142857142857143,
      "eval_loss": 1.5383577346801758,
      "eval_runtime": 14.5132,
      "eval_samples_per_second": 4.823,
      "eval_steps_per_second": 1.24,
      "step": 75
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.074074074074074e-05,
      "loss": 1.5051,
      "step": 80
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.0974,
      "step": 90
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 1.0559,
      "step": 100
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.518518518518519e-05,
      "loss": 0.9881,
      "step": 110
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.1758,
      "step": 120
    },
    {
      "epoch": 1.18,
      "learning_rate": 3.148148148148148e-05,
      "loss": 0.9197,
      "step": 130
    },
    {
      "epoch": 1.22,
      "learning_rate": 2.962962962962963e-05,
      "loss": 0.6825,
      "step": 140
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.5822,
      "step": 150
    },
    {
      "epoch": 1.25,
      "eval_accuracy": 0.8,
      "eval_loss": 0.6967638731002808,
      "eval_runtime": 14.2672,
      "eval_samples_per_second": 4.906,
      "eval_steps_per_second": 1.262,
      "step": 150
    },
    {
      "epoch": 2.03,
      "learning_rate": 2.5925925925925925e-05,
      "loss": 0.4962,
      "step": 160
    },
    {
      "epoch": 2.07,
      "learning_rate": 2.4074074074074074e-05,
      "loss": 0.6887,
      "step": 170
    },
    {
      "epoch": 2.1,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.3894,
      "step": 180
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.037037037037037e-05,
      "loss": 0.6543,
      "step": 190
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 0.3451,
      "step": 200
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.4634,
      "step": 210
    },
    {
      "epoch": 2.23,
      "learning_rate": 1.4814814814814815e-05,
      "loss": 0.2967,
      "step": 220
    },
    {
      "epoch": 2.25,
      "eval_accuracy": 0.9142857142857143,
      "eval_loss": 0.36007827520370483,
      "eval_runtime": 14.3031,
      "eval_samples_per_second": 4.894,
      "eval_steps_per_second": 1.258,
      "step": 225
    },
    {
      "epoch": 3.02,
      "learning_rate": 1.2962962962962962e-05,
      "loss": 0.2401,
      "step": 230
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.181,
      "step": 240
    },
    {
      "epoch": 3.08,
      "learning_rate": 9.259259259259259e-06,
      "loss": 0.2992,
      "step": 250
    },
    {
      "epoch": 3.12,
      "learning_rate": 7.4074074074074075e-06,
      "loss": 0.1978,
      "step": 260
    },
    {
      "epoch": 3.15,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.1838,
      "step": 270
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.7037037037037037e-06,
      "loss": 0.1787,
      "step": 280
    },
    {
      "epoch": 3.22,
      "learning_rate": 1.8518518518518519e-06,
      "loss": 0.1258,
      "step": 290
    },
    {
      "epoch": 3.25,
      "learning_rate": 0.0,
      "loss": 0.0952,
      "step": 300
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.9,
      "eval_loss": 0.2772606909275055,
      "eval_runtime": 14.5007,
      "eval_samples_per_second": 4.827,
      "eval_steps_per_second": 1.241,
      "step": 300
    },
    {
      "epoch": 3.25,
      "step": 300,
      "total_flos": 1.495384188125184e+18,
      "train_loss": 0.9297949550549189,
      "train_runtime": 631.9789,
      "train_samples_per_second": 1.899,
      "train_steps_per_second": 0.475
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.896774193548387,
      "eval_loss": 0.40425971150398254,
      "eval_runtime": 32.9264,
      "eval_samples_per_second": 4.707,
      "eval_steps_per_second": 1.184,
      "step": 300
    },
    {
      "epoch": 3.25,
      "eval_accuracy": 0.896774193548387,
      "eval_loss": 0.40425965189933777,
      "eval_runtime": 32.1373,
      "eval_samples_per_second": 4.823,
      "eval_steps_per_second": 1.214,
      "step": 300
    }
  ],
  "logging_steps": 10,
  "max_steps": 300,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "total_flos": 1.495384188125184e+18,
  "trial_name": null,
  "trial_params": null
}