|
{ |
|
"best_metric": 0.877387523651123, |
|
"best_model_checkpoint": "autotrain-8c1y5-lsgog/checkpoint-588", |
|
"epoch": 3.0, |
|
"eval_steps": 500, |
|
"global_step": 588, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.04591836734693878, |
|
"grad_norm": 13.273290634155273, |
|
"learning_rate": 5.932203389830509e-06, |
|
"loss": 1.3557, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.09183673469387756, |
|
"grad_norm": null,
|
"learning_rate": 1.2711864406779661e-05, |
|
"loss": 1.4241, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.1377551020408163, |
|
"grad_norm": 16.573213577270508, |
|
"learning_rate": 2.033898305084746e-05, |
|
"loss": 1.3423, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.1836734693877551, |
|
"grad_norm": 11.559469223022461, |
|
"learning_rate": 2.7966101694915255e-05, |
|
"loss": 1.3488, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.22959183673469388, |
|
"grad_norm": 13.121813774108887, |
|
"learning_rate": 3.474576271186441e-05, |
|
"loss": 1.2955, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.2755102040816326, |
|
"grad_norm": 19.90020179748535, |
|
"learning_rate": 4.152542372881356e-05, |
|
"loss": 1.3652, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.32142857142857145, |
|
"grad_norm": 42.38658142089844, |
|
"learning_rate": 4.915254237288136e-05, |
|
"loss": 1.3228, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.3673469387755102, |
|
"grad_norm": 16.879314422607422, |
|
"learning_rate": 4.9243856332703216e-05, |
|
"loss": 1.4373, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.413265306122449, |
|
"grad_norm": 10.268380165100098, |
|
"learning_rate": 4.839319470699433e-05, |
|
"loss": 1.4207, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.45918367346938777, |
|
"grad_norm": 49.17950439453125, |
|
"learning_rate": 4.754253308128545e-05, |
|
"loss": 1.3291, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.5051020408163265, |
|
"grad_norm": 10.70398235321045, |
|
"learning_rate": 4.669187145557656e-05, |
|
"loss": 1.2806, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.5510204081632653, |
|
"grad_norm": 14.200324058532715, |
|
"learning_rate": 4.584120982986768e-05, |
|
"loss": 1.5323, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.5969387755102041, |
|
"grad_norm": 13.224357604980469, |
|
"learning_rate": 4.499054820415879e-05, |
|
"loss": 1.3381, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.6428571428571429, |
|
"grad_norm": 12.96914291381836, |
|
"learning_rate": 4.413988657844991e-05, |
|
"loss": 1.3752, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.6887755102040817, |
|
"grad_norm": 9.912948608398438, |
|
"learning_rate": 4.3289224952741024e-05, |
|
"loss": 1.2576, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.7346938775510204, |
|
"grad_norm": 8.655780792236328, |
|
"learning_rate": 4.243856332703214e-05, |
|
"loss": 1.1475, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.7806122448979592, |
|
"grad_norm": 6.818447113037109, |
|
"learning_rate": 4.158790170132325e-05, |
|
"loss": 1.1608, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.826530612244898, |
|
"grad_norm": 36.97886276245117, |
|
"learning_rate": 4.073724007561437e-05, |
|
"loss": 1.0747, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.8724489795918368, |
|
"grad_norm": 18.22942543029785, |
|
"learning_rate": 3.988657844990548e-05, |
|
"loss": 1.0558, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.9183673469387755, |
|
"grad_norm": 14.418513298034668, |
|
"learning_rate": 3.90359168241966e-05, |
|
"loss": 1.1605, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.9642857142857143, |
|
"grad_norm": 17.860261917114258, |
|
"learning_rate": 3.8185255198487716e-05, |
|
"loss": 1.236, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.5357142857142857, |
|
"eval_f1_macro": 0.5169015312293899, |
|
"eval_f1_micro": 0.5357142857142857, |
|
"eval_f1_weighted": 0.5169015312293898, |
|
"eval_loss": 1.0397837162017822, |
|
"eval_precision_macro": 0.6040472568054893, |
|
"eval_precision_micro": 0.5357142857142857, |
|
"eval_precision_weighted": 0.6040472568054893, |
|
"eval_recall_macro": 0.5357142857142857, |
|
"eval_recall_micro": 0.5357142857142857, |
|
"eval_recall_weighted": 0.5357142857142857, |
|
"eval_runtime": 0.8403, |
|
"eval_samples_per_second": 466.478, |
|
"eval_steps_per_second": 29.75, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 1.010204081632653, |
|
"grad_norm": 10.591184616088867, |
|
"learning_rate": 3.733459357277883e-05, |
|
"loss": 1.0559, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 1.0561224489795917, |
|
"grad_norm": 20.194005966186523, |
|
"learning_rate": 3.648393194706995e-05, |
|
"loss": 1.0911, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.1020408163265305, |
|
"grad_norm": 11.607887268066406, |
|
"learning_rate": 3.563327032136106e-05, |
|
"loss": 0.9098, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.1479591836734695, |
|
"grad_norm": 29.328445434570312, |
|
"learning_rate": 3.478260869565218e-05, |
|
"loss": 0.9996, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.193877551020408, |
|
"grad_norm": 12.970704078674316, |
|
"learning_rate": 3.393194706994329e-05, |
|
"loss": 0.8921, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.239795918367347, |
|
"grad_norm": 11.346271514892578, |
|
"learning_rate": 3.308128544423441e-05, |
|
"loss": 0.7828, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.2857142857142856, |
|
"grad_norm": 26.86040687561035, |
|
"learning_rate": 3.2230623818525524e-05, |
|
"loss": 0.9659, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.3316326530612246, |
|
"grad_norm": 54.28874969482422, |
|
"learning_rate": 3.137996219281664e-05, |
|
"loss": 0.8242, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.3775510204081631, |
|
"grad_norm": 23.010454177856445, |
|
"learning_rate": 3.052930056710775e-05, |
|
"loss": 0.9375, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.4234693877551021, |
|
"grad_norm": 35.56595230102539, |
|
"learning_rate": 2.9678638941398867e-05, |
|
"loss": 0.8834, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.469387755102041, |
|
"grad_norm": 20.237417221069336, |
|
"learning_rate": 2.882797731568998e-05, |
|
"loss": 1.2205, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.5153061224489797, |
|
"grad_norm": 17.17922019958496, |
|
"learning_rate": 2.7977315689981097e-05, |
|
"loss": 1.1868, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.5612244897959182, |
|
"grad_norm": 36.083892822265625, |
|
"learning_rate": 2.7126654064272213e-05, |
|
"loss": 0.7935, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.6071428571428572, |
|
"grad_norm": 12.143678665161133, |
|
"learning_rate": 2.6275992438563328e-05, |
|
"loss": 0.8405, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.6530612244897958, |
|
"grad_norm": 17.92725944519043, |
|
"learning_rate": 2.5425330812854444e-05, |
|
"loss": 0.9103, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.6989795918367347, |
|
"grad_norm": 8.833161354064941, |
|
"learning_rate": 2.457466918714556e-05, |
|
"loss": 0.8916, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.7448979591836735, |
|
"grad_norm": 16.942583084106445, |
|
"learning_rate": 2.3724007561436674e-05, |
|
"loss": 0.8364, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.7908163265306123, |
|
"grad_norm": 52.1734733581543, |
|
"learning_rate": 2.287334593572779e-05, |
|
"loss": 1.0671, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.836734693877551, |
|
"grad_norm": 17.128494262695312, |
|
"learning_rate": 2.2022684310018905e-05, |
|
"loss": 0.7792, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.8826530612244898, |
|
"grad_norm": 11.736278533935547, |
|
"learning_rate": 2.117202268431002e-05, |
|
"loss": 0.7889, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.9285714285714286, |
|
"grad_norm": 22.40500831604004, |
|
"learning_rate": 2.0321361058601136e-05, |
|
"loss": 0.8719, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.9744897959183674, |
|
"grad_norm": 7.382483959197998, |
|
"learning_rate": 1.947069943289225e-05, |
|
"loss": 0.7411, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.6454081632653061, |
|
"eval_f1_macro": 0.6383598362124523, |
|
"eval_f1_micro": 0.6454081632653061, |
|
"eval_f1_weighted": 0.6383598362124524, |
|
"eval_loss": 0.9568982720375061, |
|
"eval_precision_macro": 0.6613340870548332, |
|
"eval_precision_micro": 0.6454081632653061, |
|
"eval_precision_weighted": 0.6613340870548333, |
|
"eval_recall_macro": 0.6454081632653061, |
|
"eval_recall_micro": 0.6454081632653061, |
|
"eval_recall_weighted": 0.6454081632653061, |
|
"eval_runtime": 0.8449, |
|
"eval_samples_per_second": 463.965, |
|
"eval_steps_per_second": 29.59, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.020408163265306, |
|
"grad_norm": 12.731431007385254, |
|
"learning_rate": 1.8714555765595465e-05, |
|
"loss": 0.6781, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.066326530612245, |
|
"grad_norm": 20.548770904541016, |
|
"learning_rate": 1.786389413988658e-05, |
|
"loss": 0.4761, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.1122448979591835, |
|
"grad_norm": 22.985334396362305, |
|
"learning_rate": 1.7013232514177692e-05, |
|
"loss": 0.6752, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.1581632653061225, |
|
"grad_norm": 22.62578010559082, |
|
"learning_rate": 1.6162570888468808e-05, |
|
"loss": 0.5669, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.204081632653061, |
|
"grad_norm": 11.662425994873047, |
|
"learning_rate": 1.5311909262759923e-05, |
|
"loss": 0.4492, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"grad_norm": 22.092039108276367, |
|
"learning_rate": 1.446124763705104e-05, |
|
"loss": 0.6171, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.295918367346939, |
|
"grad_norm": 18.506088256835938, |
|
"learning_rate": 1.3610586011342156e-05, |
|
"loss": 0.516, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.3418367346938775, |
|
"grad_norm": 50.34038543701172, |
|
"learning_rate": 1.2759924385633271e-05, |
|
"loss": 0.8574, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.387755102040816, |
|
"grad_norm": 17.631423950195312, |
|
"learning_rate": 1.1909262759924386e-05, |
|
"loss": 0.5258, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.433673469387755, |
|
"grad_norm": 14.761687278747559, |
|
"learning_rate": 1.1058601134215502e-05, |
|
"loss": 0.697, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.479591836734694, |
|
"grad_norm": 15.371265411376953, |
|
"learning_rate": 1.0207939508506617e-05, |
|
"loss": 0.6966, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.5255102040816326, |
|
"grad_norm": 11.90428638458252, |
|
"learning_rate": 9.357277882797732e-06, |
|
"loss": 0.4714, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.571428571428571, |
|
"grad_norm": 20.273618698120117, |
|
"learning_rate": 8.506616257088846e-06, |
|
"loss": 0.4554, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.61734693877551, |
|
"grad_norm": 16.07473373413086, |
|
"learning_rate": 7.655954631379962e-06, |
|
"loss": 0.5895, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.663265306122449, |
|
"grad_norm": 13.280965805053711, |
|
"learning_rate": 6.805293005671078e-06, |
|
"loss": 0.5932, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.7091836734693877, |
|
"grad_norm": 14.767223358154297, |
|
"learning_rate": 5.954631379962193e-06, |
|
"loss": 0.4047, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 2.7551020408163263, |
|
"grad_norm": 12.252891540527344, |
|
"learning_rate": 5.1039697542533085e-06, |
|
"loss": 0.544, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.8010204081632653, |
|
"grad_norm": 17.47493553161621, |
|
"learning_rate": 4.253308128544423e-06, |
|
"loss": 0.535, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 2.8469387755102042, |
|
"grad_norm": 12.823530197143555, |
|
"learning_rate": 3.402646502835539e-06, |
|
"loss": 0.5172, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 2.892857142857143, |
|
"grad_norm": 12.597037315368652, |
|
"learning_rate": 2.5519848771266543e-06, |
|
"loss": 0.5575, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 2.938775510204082, |
|
"grad_norm": 3.2630369663238525, |
|
"learning_rate": 1.7013232514177694e-06, |
|
"loss": 0.5858, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 2.9846938775510203, |
|
"grad_norm": 20.04022789001465, |
|
"learning_rate": 8.506616257088847e-07, |
|
"loss": 0.6399, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.6938775510204082, |
|
"eval_f1_macro": 0.693991401598852, |
|
"eval_f1_micro": 0.6938775510204082, |
|
"eval_f1_weighted": 0.6939914015988521, |
|
"eval_loss": 0.877387523651123, |
|
"eval_precision_macro": 0.6961606483158189, |
|
"eval_precision_micro": 0.6938775510204082, |
|
"eval_precision_weighted": 0.6961606483158188, |
|
"eval_recall_macro": 0.6938775510204082, |
|
"eval_recall_micro": 0.6938775510204082, |
|
"eval_recall_weighted": 0.6938775510204082, |
|
"eval_runtime": 0.8233, |
|
"eval_samples_per_second": 476.156, |
|
"eval_steps_per_second": 30.367, |
|
"step": 588 |
|
} |
|
], |
|
"logging_steps": 9, |
|
"max_steps": 588, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.01 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 308634810052608.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|