{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 500,
"global_step": 81,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_B-Claim": {
"f1-score": 0.37160751565762,
"precision": 0.42788461538461536,
"recall": 0.3284132841328413,
"support": 271.0
},
"eval_B-MajorClaim": {
"f1-score": 0.3578947368421052,
"precision": 0.6666666666666666,
"recall": 0.2446043165467626,
"support": 139.0
},
"eval_B-Premise": {
"f1-score": 0.8640915593705293,
"precision": 0.7895424836601307,
"recall": 0.9541864139020537,
"support": 633.0
},
"eval_I-Claim": {
"f1-score": 0.5003402749421533,
"precision": 0.5493126120741183,
"recall": 0.4593851537115721,
"support": 4001.0
},
"eval_I-MajorClaim": {
"f1-score": 0.7718093699515347,
"precision": 0.6502211636611093,
"recall": 0.9493293591654247,
"support": 2013.0
},
"eval_I-Premise": {
"f1-score": 0.875016720916752,
"precision": 0.8846812731043188,
"recall": 0.865561044460127,
"support": 11336.0
},
"eval_O": {
"f1-score": 0.9992483530087988,
"precision": 0.9995577178239717,
"recall": 0.998939179632249,
"support": 11312.0
},
"eval_accuracy": 0.8614038040733883,
"eval_loss": 0.31713685393333435,
"eval_macro avg": {
"f1-score": 0.6771440758127848,
"precision": 0.7096952189107044,
"recall": 0.685774107364433,
"support": 29705.0
},
"eval_runtime": 4.8338,
"eval_samples_per_second": 16.55,
"eval_steps_per_second": 2.069,
"eval_weighted avg": {
"f1-score": 0.8576207231627551,
"precision": 0.8601529227027923,
"recall": 0.8614038040733883,
"support": 29705.0
},
"step": 81
}
],
"logging_steps": 500,
"max_steps": 4050,
"num_input_tokens_seen": 0,
"num_train_epochs": 50,
"save_steps": 500,
"total_flos": 143790812718000.0,
"train_batch_size": 4,
"trial_name": null,
"trial_params": null
}