{
  "best_metric": 0.6081525683403015,
  "best_model_checkpoint": "autotrain-cwa7t-r6nu7/checkpoint-500",
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.048,
      "grad_norm": 5.120242118835449,
      "learning_rate": 4.800000000000001e-06,
      "loss": 1.077,
      "step": 12
    },
    {
      "epoch": 0.096,
      "grad_norm": 3.48054838180542,
      "learning_rate": 9.600000000000001e-06,
      "loss": 1.0941,
      "step": 24
    },
    {
      "epoch": 0.144,
      "grad_norm": 4.692188262939453,
      "learning_rate": 1.44e-05,
      "loss": 1.0877,
      "step": 36
    },
    {
      "epoch": 0.192,
      "grad_norm": 4.942972183227539,
      "learning_rate": 1.9200000000000003e-05,
      "loss": 1.0566,
      "step": 48
    },
    {
      "epoch": 0.24,
      "grad_norm": 10.25078296661377,
      "learning_rate": 2.36e-05,
      "loss": 0.9183,
      "step": 60
    },
    {
      "epoch": 0.288,
      "grad_norm": 25.465534210205078,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 0.9146,
      "step": 72
    },
    {
      "epoch": 0.336,
      "grad_norm": 21.620311737060547,
      "learning_rate": 3.2800000000000004e-05,
      "loss": 0.8352,
      "step": 84
    },
    {
      "epoch": 0.384,
      "grad_norm": 12.405364036560059,
      "learning_rate": 3.76e-05,
      "loss": 0.8715,
      "step": 96
    },
    {
      "epoch": 0.432,
      "grad_norm": 20.498157501220703,
      "learning_rate": 4.24e-05,
      "loss": 0.7661,
      "step": 108
    },
    {
      "epoch": 0.48,
      "grad_norm": 14.002096176147461,
      "learning_rate": 4.72e-05,
      "loss": 0.6987,
      "step": 120
    },
    {
      "epoch": 0.528,
      "grad_norm": 15.950400352478027,
      "learning_rate": 4.977777777777778e-05,
      "loss": 0.7478,
      "step": 132
    },
    {
      "epoch": 0.576,
      "grad_norm": 14.89918327331543,
      "learning_rate": 4.928888888888889e-05,
      "loss": 0.8167,
      "step": 144
    },
    {
      "epoch": 0.624,
      "grad_norm": 21.52458381652832,
      "learning_rate": 4.875555555555556e-05,
      "loss": 0.8677,
      "step": 156
    },
    {
      "epoch": 0.672,
      "grad_norm": 9.06266975402832,
      "learning_rate": 4.8222222222222225e-05,
      "loss": 0.7637,
      "step": 168
    },
    {
      "epoch": 0.72,
      "grad_norm": 17.687030792236328,
      "learning_rate": 4.768888888888889e-05,
      "loss": 0.7588,
      "step": 180
    },
    {
      "epoch": 0.768,
      "grad_norm": 18.937341690063477,
      "learning_rate": 4.715555555555556e-05,
      "loss": 0.7809,
      "step": 192
    },
    {
      "epoch": 0.816,
      "grad_norm": 20.595829010009766,
      "learning_rate": 4.662222222222222e-05,
      "loss": 0.6525,
      "step": 204
    },
    {
      "epoch": 0.864,
      "grad_norm": 14.021669387817383,
      "learning_rate": 4.608888888888889e-05,
      "loss": 0.7512,
      "step": 216
    },
    {
      "epoch": 0.912,
      "grad_norm": 17.159658432006836,
      "learning_rate": 4.555555555555556e-05,
      "loss": 0.6444,
      "step": 228
    },
    {
      "epoch": 0.96,
      "grad_norm": 15.096858978271484,
      "learning_rate": 4.502222222222223e-05,
      "loss": 0.7502,
      "step": 240
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7367367367367368,
      "eval_f1_macro": 0.6780399587593059,
      "eval_f1_micro": 0.7367367367367368,
      "eval_f1_weighted": 0.7159182064227351,
      "eval_loss": 0.7116303443908691,
      "eval_precision_macro": 0.7407168006456986,
      "eval_precision_micro": 0.7367367367367368,
      "eval_precision_weighted": 0.738750739597193,
      "eval_recall_macro": 0.6747808186254532,
      "eval_recall_micro": 0.7367367367367368,
      "eval_recall_weighted": 0.7367367367367368,
      "eval_runtime": 1.2649,
      "eval_samples_per_second": 789.799,
      "eval_steps_per_second": 25.299,
      "step": 250
    },
    {
      "epoch": 1.008,
      "grad_norm": 3.268965244293213,
      "learning_rate": 4.448888888888889e-05,
      "loss": 0.6109,
      "step": 252
    },
    {
      "epoch": 1.056,
      "grad_norm": 9.524105072021484,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 0.6676,
      "step": 264
    },
    {
      "epoch": 1.104,
      "grad_norm": 13.357897758483887,
      "learning_rate": 4.346666666666667e-05,
      "loss": 0.6534,
      "step": 276
    },
    {
      "epoch": 1.152,
      "grad_norm": 12.989001274108887,
      "learning_rate": 4.293333333333334e-05,
      "loss": 0.5564,
      "step": 288
    },
    {
      "epoch": 1.2,
      "grad_norm": 13.040504455566406,
      "learning_rate": 4.24e-05,
      "loss": 0.5432,
      "step": 300
    },
    {
      "epoch": 1.248,
      "grad_norm": 27.441036224365234,
      "learning_rate": 4.186666666666667e-05,
      "loss": 0.5495,
      "step": 312
    },
    {
      "epoch": 1.296,
      "grad_norm": 12.519679069519043,
      "learning_rate": 4.133333333333333e-05,
      "loss": 0.548,
      "step": 324
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 19.905864715576172,
      "learning_rate": 4.08e-05,
      "loss": 0.7036,
      "step": 336
    },
    {
      "epoch": 1.392,
      "grad_norm": 19.19709014892578,
      "learning_rate": 4.026666666666667e-05,
      "loss": 0.6243,
      "step": 348
    },
    {
      "epoch": 1.44,
      "grad_norm": 25.307151794433594,
      "learning_rate": 3.9733333333333335e-05,
      "loss": 0.5592,
      "step": 360
    },
    {
      "epoch": 1.488,
      "grad_norm": 12.860663414001465,
      "learning_rate": 3.9200000000000004e-05,
      "loss": 0.5251,
      "step": 372
    },
    {
      "epoch": 1.536,
      "grad_norm": 14.503999710083008,
      "learning_rate": 3.866666666666667e-05,
      "loss": 0.5004,
      "step": 384
    },
    {
      "epoch": 1.584,
      "grad_norm": 16.512439727783203,
      "learning_rate": 3.8133333333333336e-05,
      "loss": 0.5418,
      "step": 396
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 10.380609512329102,
      "learning_rate": 3.76e-05,
      "loss": 0.6202,
      "step": 408
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 14.396175384521484,
      "learning_rate": 3.706666666666667e-05,
      "loss": 0.6095,
      "step": 420
    },
    {
      "epoch": 1.728,
      "grad_norm": 15.407332420349121,
      "learning_rate": 3.653333333333334e-05,
      "loss": 0.6155,
      "step": 432
    },
    {
      "epoch": 1.776,
      "grad_norm": 21.06899642944336,
      "learning_rate": 3.6e-05,
      "loss": 0.5009,
      "step": 444
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 9.709734916687012,
      "learning_rate": 3.546666666666667e-05,
      "loss": 0.4664,
      "step": 456
    },
    {
      "epoch": 1.8719999999999999,
      "grad_norm": 30.69609260559082,
      "learning_rate": 3.493333333333333e-05,
      "loss": 0.6268,
      "step": 468
    },
    {
      "epoch": 1.92,
      "grad_norm": 10.711507797241211,
      "learning_rate": 3.4399999999999996e-05,
      "loss": 0.5064,
      "step": 480
    },
    {
      "epoch": 1.968,
      "grad_norm": 10.850369453430176,
      "learning_rate": 3.3866666666666665e-05,
      "loss": 0.5403,
      "step": 492
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7567567567567568,
      "eval_f1_macro": 0.7293016589919367,
      "eval_f1_micro": 0.7567567567567568,
      "eval_f1_weighted": 0.7525753769969824,
      "eval_loss": 0.6081525683403015,
      "eval_precision_macro": 0.7459781321674904,
      "eval_precision_micro": 0.7567567567567568,
      "eval_precision_weighted": 0.7607241180619724,
      "eval_recall_macro": 0.727181992488115,
      "eval_recall_micro": 0.7567567567567568,
      "eval_recall_weighted": 0.7567567567567568,
      "eval_runtime": 1.2636,
      "eval_samples_per_second": 790.605,
      "eval_steps_per_second": 25.325,
      "step": 500
    }
  ],
  "logging_steps": 12,
  "max_steps": 1250,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.01
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 525305938493952.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}