{
  "best_metric": 0.8289,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat\\checkpoint-2109",
  "epoch": 2.998933522929257,
  "global_step": 2109,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 2.3696682464454976e-06, "loss": 5.3748, "step": 10 },
    { "epoch": 0.03, "learning_rate": 4.739336492890995e-06, "loss": 5.3569, "step": 20 },
    { "epoch": 0.04, "learning_rate": 7.109004739336493e-06, "loss": 5.3245, "step": 30 },
    { "epoch": 0.06, "learning_rate": 9.47867298578199e-06, "loss": 5.3197, "step": 40 },
    { "epoch": 0.07, "learning_rate": 1.184834123222749e-05, "loss": 5.3088, "step": 50 },
    { "epoch": 0.09, "learning_rate": 1.4218009478672985e-05, "loss": 5.2824, "step": 60 },
    { "epoch": 0.1, "learning_rate": 1.6587677725118483e-05, "loss": 5.2667, "step": 70 },
    { "epoch": 0.11, "learning_rate": 1.895734597156398e-05, "loss": 5.2398, "step": 80 },
    { "epoch": 0.13, "learning_rate": 2.132701421800948e-05, "loss": 5.1939, "step": 90 },
    { "epoch": 0.14, "learning_rate": 2.369668246445498e-05, "loss": 5.1625, "step": 100 },
    { "epoch": 0.16, "learning_rate": 2.6066350710900477e-05, "loss": 5.1027, "step": 110 },
    { "epoch": 0.17, "learning_rate": 2.843601895734597e-05, "loss": 5.0203, "step": 120 },
    { "epoch": 0.18, "learning_rate": 3.080568720379147e-05, "loss": 4.9328, "step": 130 },
    { "epoch": 0.2, "learning_rate": 3.3175355450236966e-05, "loss": 4.7904, "step": 140 },
    { "epoch": 0.21, "learning_rate": 3.554502369668247e-05, "loss": 4.6625, "step": 150 },
    { "epoch": 0.23, "learning_rate": 3.791469194312796e-05, "loss": 4.4841, "step": 160 },
    { "epoch": 0.24, "learning_rate": 4.028436018957346e-05, "loss": 4.2482, "step": 170 },
    { "epoch": 0.26, "learning_rate": 4.265402843601896e-05, "loss": 4.0752, "step": 180 },
    { "epoch": 0.27, "learning_rate": 4.502369668246446e-05, "loss": 3.9379, "step": 190 },
    { "epoch": 0.28, "learning_rate": 4.739336492890996e-05, "loss": 3.734, "step": 200 },
    { "epoch": 0.3, "learning_rate": 4.976303317535545e-05, "loss": 3.5028, "step": 210 },
    { "epoch": 0.31, "learning_rate": 4.976290832455216e-05, "loss": 3.4281, "step": 220 },
    { "epoch": 0.33, "learning_rate": 4.949947312961012e-05, "loss": 3.2753, "step": 230 },
    { "epoch": 0.34, "learning_rate": 4.923603793466807e-05, "loss": 3.0507, "step": 240 },
    { "epoch": 0.36, "learning_rate": 4.8972602739726034e-05, "loss": 2.9402, "step": 250 },
    { "epoch": 0.37, "learning_rate": 4.8709167544783986e-05, "loss": 2.8824, "step": 260 },
    { "epoch": 0.38, "learning_rate": 4.8445732349841945e-05, "loss": 2.6848, "step": 270 },
    { "epoch": 0.4, "learning_rate": 4.8182297154899896e-05, "loss": 2.6702, "step": 280 },
    { "epoch": 0.41, "learning_rate": 4.791886195995785e-05, "loss": 2.6096, "step": 290 },
    { "epoch": 0.43, "learning_rate": 4.7655426765015806e-05, "loss": 2.5345, "step": 300 },
    { "epoch": 0.44, "learning_rate": 4.7391991570073765e-05, "loss": 2.5112, "step": 310 },
    { "epoch": 0.46, "learning_rate": 4.712855637513172e-05, "loss": 2.4971, "step": 320 },
    { "epoch": 0.47, "learning_rate": 4.6865121180189675e-05, "loss": 2.3219, "step": 330 },
    { "epoch": 0.48, "learning_rate": 4.6601685985247633e-05, "loss": 2.4026, "step": 340 },
    { "epoch": 0.5, "learning_rate": 4.6338250790305585e-05, "loss": 2.2161, "step": 350 },
    { "epoch": 0.51, "learning_rate": 4.6074815595363544e-05, "loss": 2.2615, "step": 360 },
    { "epoch": 0.53, "learning_rate": 4.58113804004215e-05, "loss": 2.2194, "step": 370 },
    { "epoch": 0.54, "learning_rate": 4.5547945205479454e-05, "loss": 2.2824, "step": 380 },
    { "epoch": 0.55, "learning_rate": 4.528451001053741e-05, "loss": 2.1638, "step": 390 },
    { "epoch": 0.57, "learning_rate": 4.5021074815595364e-05, "loss": 2.0788, "step": 400 },
    { "epoch": 0.58, "learning_rate": 4.4757639620653316e-05, "loss": 2.0187, "step": 410 },
    { "epoch": 0.6, "learning_rate": 4.449420442571128e-05, "loss": 2.0782, "step": 420 },
    { "epoch": 0.61, "learning_rate": 4.423076923076923e-05, "loss": 2.0265, "step": 430 },
    { "epoch": 0.63, "learning_rate": 4.396733403582719e-05, "loss": 1.9744, "step": 440 },
    { "epoch": 0.64, "learning_rate": 4.370389884088514e-05, "loss": 2.006, "step": 450 },
    { "epoch": 0.65, "learning_rate": 4.34404636459431e-05, "loss": 2.0586, "step": 460 },
    { "epoch": 0.67, "learning_rate": 4.317702845100105e-05, "loss": 1.9814, "step": 470 },
    { "epoch": 0.68, "learning_rate": 4.291359325605901e-05, "loss": 1.9808, "step": 480 },
    { "epoch": 0.7, "learning_rate": 4.265015806111697e-05, "loss": 1.9932, "step": 490 },
    { "epoch": 0.71, "learning_rate": 4.238672286617492e-05, "loss": 1.9604, "step": 500 },
    { "epoch": 0.73, "learning_rate": 4.212328767123288e-05, "loss": 1.8877, "step": 510 },
    { "epoch": 0.74, "learning_rate": 4.185985247629083e-05, "loss": 1.884, "step": 520 },
    { "epoch": 0.75, "learning_rate": 4.159641728134879e-05, "loss": 1.9931, "step": 530 },
    { "epoch": 0.77, "learning_rate": 4.133298208640675e-05, "loss": 1.9496, "step": 540 },
    { "epoch": 0.78, "learning_rate": 4.10695468914647e-05, "loss": 1.7859, "step": 550 },
    { "epoch": 0.8, "learning_rate": 4.080611169652266e-05, "loss": 1.8249, "step": 560 },
    { "epoch": 0.81, "learning_rate": 4.054267650158061e-05, "loss": 1.7965, "step": 570 },
    { "epoch": 0.82, "learning_rate": 4.027924130663857e-05, "loss": 1.8011, "step": 580 },
    { "epoch": 0.84, "learning_rate": 4.001580611169653e-05, "loss": 1.7976, "step": 590 },
    { "epoch": 0.85, "learning_rate": 3.975237091675448e-05, "loss": 1.7056, "step": 600 },
    { "epoch": 0.87, "learning_rate": 3.948893572181244e-05, "loss": 1.9267, "step": 610 },
    { "epoch": 0.88, "learning_rate": 3.922550052687039e-05, "loss": 1.7014, "step": 620 },
    { "epoch": 0.9, "learning_rate": 3.896206533192835e-05, "loss": 1.8617, "step": 630 },
    { "epoch": 0.91, "learning_rate": 3.86986301369863e-05, "loss": 1.8402, "step": 640 },
    { "epoch": 0.92, "learning_rate": 3.843519494204426e-05, "loss": 1.7088, "step": 650 },
    { "epoch": 0.94, "learning_rate": 3.8171759747102217e-05, "loss": 1.8482, "step": 660 },
    { "epoch": 0.95, "learning_rate": 3.790832455216017e-05, "loss": 1.7017, "step": 670 },
    { "epoch": 0.97, "learning_rate": 3.764488935721813e-05, "loss": 1.8167, "step": 680 },
    { "epoch": 0.98, "learning_rate": 3.738145416227608e-05, "loss": 1.7691, "step": 690 },
    { "epoch": 1.0, "learning_rate": 3.711801896733404e-05, "loss": 1.7922, "step": 700 },
    { "epoch": 1.0, "eval_accuracy": 0.7804, "eval_loss": 0.8853043913841248, "eval_runtime": 39.7328, "eval_samples_per_second": 251.681, "eval_steps_per_second": 7.878, "step": 703 },
    { "epoch": 1.01, "learning_rate": 3.6854583772391995e-05, "loss": 1.5741, "step": 710 },
    { "epoch": 1.02, "learning_rate": 3.6591148577449954e-05, "loss": 1.6612, "step": 720 },
    { "epoch": 1.04, "learning_rate": 3.6327713382507905e-05, "loss": 1.6439, "step": 730 },
    { "epoch": 1.05, "learning_rate": 3.606427818756586e-05, "loss": 1.6165, "step": 740 },
    { "epoch": 1.07, "learning_rate": 3.5800842992623816e-05, "loss": 1.6161, "step": 750 },
    { "epoch": 1.08, "learning_rate": 3.5537407797681774e-05, "loss": 1.5302, "step": 760 },
    { "epoch": 1.09, "learning_rate": 3.527397260273973e-05, "loss": 1.6897, "step": 770 },
    { "epoch": 1.11, "learning_rate": 3.5010537407797684e-05, "loss": 1.6632, "step": 780 },
    { "epoch": 1.12, "learning_rate": 3.4747102212855636e-05, "loss": 1.5924, "step": 790 },
    { "epoch": 1.14, "learning_rate": 3.4483667017913594e-05, "loss": 1.6301, "step": 800 },
    { "epoch": 1.15, "learning_rate": 3.4220231822971546e-05, "loss": 1.6031, "step": 810 },
    { "epoch": 1.17, "learning_rate": 3.3956796628029505e-05, "loss": 1.7069, "step": 820 },
    { "epoch": 1.18, "learning_rate": 3.369336143308746e-05, "loss": 1.6232, "step": 830 },
    { "epoch": 1.19, "learning_rate": 3.342992623814542e-05, "loss": 1.6028, "step": 840 },
    { "epoch": 1.21, "learning_rate": 3.316649104320337e-05, "loss": 1.5437, "step": 850 },
    { "epoch": 1.22, "learning_rate": 3.2903055848261325e-05, "loss": 1.534, "step": 860 },
    { "epoch": 1.24, "learning_rate": 3.2639620653319283e-05, "loss": 1.5398, "step": 870 },
    { "epoch": 1.25, "learning_rate": 3.237618545837724e-05, "loss": 1.4951, "step": 880 },
    { "epoch": 1.27, "learning_rate": 3.21127502634352e-05, "loss": 1.6149, "step": 890 },
    { "epoch": 1.28, "learning_rate": 3.184931506849315e-05, "loss": 1.6144, "step": 900 },
    { "epoch": 1.29, "learning_rate": 3.1585879873551104e-05, "loss": 1.5772, "step": 910 },
    { "epoch": 1.31, "learning_rate": 3.132244467860906e-05, "loss": 1.4824, "step": 920 },
    { "epoch": 1.32, "learning_rate": 3.105900948366702e-05, "loss": 1.5298, "step": 930 },
    { "epoch": 1.34, "learning_rate": 3.079557428872498e-05, "loss": 1.5792, "step": 940 },
    { "epoch": 1.35, "learning_rate": 3.053213909378293e-05, "loss": 1.5499, "step": 950 },
    { "epoch": 1.37, "learning_rate": 3.026870389884089e-05, "loss": 1.5453, "step": 960 },
    { "epoch": 1.38, "learning_rate": 3.000526870389884e-05, "loss": 1.5818, "step": 970 },
    { "epoch": 1.39, "learning_rate": 2.9741833508956796e-05, "loss": 1.4333, "step": 980 },
    { "epoch": 1.41, "learning_rate": 2.9478398314014755e-05, "loss": 1.5632, "step": 990 },
    { "epoch": 1.42, "learning_rate": 2.921496311907271e-05, "loss": 1.525, "step": 1000 },
    { "epoch": 1.44, "learning_rate": 2.8951527924130668e-05, "loss": 1.5256, "step": 1010 },
    { "epoch": 1.45, "learning_rate": 2.868809272918862e-05, "loss": 1.5352, "step": 1020 },
    { "epoch": 1.46, "learning_rate": 2.842465753424658e-05, "loss": 1.4807, "step": 1030 },
    { "epoch": 1.48, "learning_rate": 2.8161222339304533e-05, "loss": 1.5398, "step": 1040 },
    { "epoch": 1.49, "learning_rate": 2.7897787144362485e-05, "loss": 1.559, "step": 1050 },
    { "epoch": 1.51, "learning_rate": 2.7634351949420444e-05, "loss": 1.5625, "step": 1060 },
    { "epoch": 1.52, "learning_rate": 2.73709167544784e-05, "loss": 1.4423, "step": 1070 },
    { "epoch": 1.54, "learning_rate": 2.7107481559536357e-05, "loss": 1.4928, "step": 1080 },
    { "epoch": 1.55, "learning_rate": 2.6844046364594312e-05, "loss": 1.6027, "step": 1090 },
    { "epoch": 1.56, "learning_rate": 2.6580611169652264e-05, "loss": 1.4623, "step": 1100 },
    { "epoch": 1.58, "learning_rate": 2.6317175974710222e-05, "loss": 1.6102, "step": 1110 },
    { "epoch": 1.59, "learning_rate": 2.6053740779768177e-05, "loss": 1.4465, "step": 1120 },
    { "epoch": 1.61, "learning_rate": 2.5790305584826136e-05, "loss": 1.4764, "step": 1130 },
    { "epoch": 1.62, "learning_rate": 2.5526870389884088e-05, "loss": 1.5132, "step": 1140 },
    { "epoch": 1.64, "learning_rate": 2.5263435194942046e-05, "loss": 1.5237, "step": 1150 },
    { "epoch": 1.65, "learning_rate": 2.5e-05, "loss": 1.5527, "step": 1160 },
    { "epoch": 1.66, "learning_rate": 2.4736564805057956e-05, "loss": 1.4051, "step": 1170 },
    { "epoch": 1.68, "learning_rate": 2.4473129610115915e-05, "loss": 1.6083, "step": 1180 },
    { "epoch": 1.69, "learning_rate": 2.420969441517387e-05, "loss": 1.4604, "step": 1190 },
    { "epoch": 1.71, "learning_rate": 2.394625922023182e-05, "loss": 1.5632, "step": 1200 },
    { "epoch": 1.72, "learning_rate": 2.368282402528978e-05, "loss": 1.5384, "step": 1210 },
    { "epoch": 1.73, "learning_rate": 2.3419388830347735e-05, "loss": 1.4031, "step": 1220 },
    { "epoch": 1.75, "learning_rate": 2.315595363540569e-05, "loss": 1.5656, "step": 1230 },
    { "epoch": 1.76, "learning_rate": 2.289251844046365e-05, "loss": 1.3992, "step": 1240 },
    { "epoch": 1.78, "learning_rate": 2.2629083245521604e-05, "loss": 1.4831, "step": 1250 },
    { "epoch": 1.79, "learning_rate": 2.236564805057956e-05, "loss": 1.4081, "step": 1260 },
    { "epoch": 1.81, "learning_rate": 2.2102212855637514e-05, "loss": 1.5012, "step": 1270 },
    { "epoch": 1.82, "learning_rate": 2.183877766069547e-05, "loss": 1.4697, "step": 1280 },
    { "epoch": 1.83, "learning_rate": 2.1575342465753427e-05, "loss": 1.4538, "step": 1290 },
    { "epoch": 1.85, "learning_rate": 2.1311907270811383e-05, "loss": 1.372, "step": 1300 },
    { "epoch": 1.86, "learning_rate": 2.1048472075869338e-05, "loss": 1.5106, "step": 1310 },
    { "epoch": 1.88, "learning_rate": 2.0785036880927293e-05, "loss": 1.518, "step": 1320 },
    { "epoch": 1.89, "learning_rate": 2.0521601685985248e-05, "loss": 1.4169, "step": 1330 },
    { "epoch": 1.91, "learning_rate": 2.0258166491043203e-05, "loss": 1.4372, "step": 1340 },
    { "epoch": 1.92, "learning_rate": 1.999473129610116e-05, "loss": 1.5191, "step": 1350 },
    { "epoch": 1.93, "learning_rate": 1.9731296101159116e-05, "loss": 1.4005, "step": 1360 },
    { "epoch": 1.95, "learning_rate": 1.946786090621707e-05, "loss": 1.4019, "step": 1370 },
    { "epoch": 1.96, "learning_rate": 1.9204425711275027e-05, "loss": 1.4445, "step": 1380 },
    { "epoch": 1.98, "learning_rate": 1.894099051633298e-05, "loss": 1.3176, "step": 1390 },
    { "epoch": 1.99, "learning_rate": 1.8677555321390937e-05, "loss": 1.4677, "step": 1400 },
    { "epoch": 2.0, "eval_accuracy": 0.8124, "eval_loss": 0.7063834071159363, "eval_runtime": 34.1843, "eval_samples_per_second": 292.532, "eval_steps_per_second": 9.156, "step": 1406 },
    { "epoch": 2.0, "learning_rate": 1.8414120126448895e-05, "loss": 1.4671, "step": 1410 },
    { "epoch": 2.02, "learning_rate": 1.815068493150685e-05, "loss": 1.2756, "step": 1420 },
    { "epoch": 2.03, "learning_rate": 1.7887249736564805e-05, "loss": 1.3293, "step": 1430 },
    { "epoch": 2.05, "learning_rate": 1.7623814541622764e-05, "loss": 1.3382, "step": 1440 },
    { "epoch": 2.06, "learning_rate": 1.7360379346680716e-05, "loss": 1.4155, "step": 1450 },
    { "epoch": 2.08, "learning_rate": 1.7096944151738674e-05, "loss": 1.331, "step": 1460 },
    { "epoch": 2.09, "learning_rate": 1.683350895679663e-05, "loss": 1.4016, "step": 1470 },
    { "epoch": 2.1, "learning_rate": 1.6570073761854584e-05, "loss": 1.361, "step": 1480 },
    { "epoch": 2.12, "learning_rate": 1.630663856691254e-05, "loss": 1.3983, "step": 1490 },
    { "epoch": 2.13, "learning_rate": 1.6043203371970498e-05, "loss": 1.3422, "step": 1500 },
    { "epoch": 2.15, "learning_rate": 1.577976817702845e-05, "loss": 1.3971, "step": 1510 },
    { "epoch": 2.16, "learning_rate": 1.5516332982086408e-05, "loss": 1.422, "step": 1520 },
    { "epoch": 2.18, "learning_rate": 1.5252897787144363e-05, "loss": 1.4001, "step": 1530 },
    { "epoch": 2.19, "learning_rate": 1.498946259220232e-05, "loss": 1.329, "step": 1540 },
    { "epoch": 2.2, "learning_rate": 1.4726027397260275e-05, "loss": 1.3531, "step": 1550 },
    { "epoch": 2.22, "learning_rate": 1.4462592202318232e-05, "loss": 1.3514, "step": 1560 },
    { "epoch": 2.23, "learning_rate": 1.4199157007376185e-05, "loss": 1.32, "step": 1570 },
    { "epoch": 2.25, "learning_rate": 1.3935721812434142e-05, "loss": 1.4405, "step": 1580 },
    { "epoch": 2.26, "learning_rate": 1.3672286617492097e-05, "loss": 1.4477, "step": 1590 },
    { "epoch": 2.28, "learning_rate": 1.3408851422550054e-05, "loss": 1.3322, "step": 1600 },
    { "epoch": 2.29, "learning_rate": 1.3145416227608009e-05, "loss": 1.3451, "step": 1610 },
    { "epoch": 2.3, "learning_rate": 1.2881981032665966e-05, "loss": 1.3795, "step": 1620 },
    { "epoch": 2.32, "learning_rate": 1.2618545837723922e-05, "loss": 1.2765, "step": 1630 },
    { "epoch": 2.33, "learning_rate": 1.2355110642781877e-05, "loss": 1.3775, "step": 1640 },
    { "epoch": 2.35, "learning_rate": 1.209167544783983e-05, "loss": 1.2989, "step": 1650 },
    { "epoch": 2.36, "learning_rate": 1.1828240252897788e-05, "loss": 1.4062, "step": 1660 },
    { "epoch": 2.37, "learning_rate": 1.1564805057955744e-05, "loss": 1.329, "step": 1670 },
    { "epoch": 2.39, "learning_rate": 1.1301369863013698e-05, "loss": 1.3265, "step": 1680 },
    { "epoch": 2.4, "learning_rate": 1.1037934668071655e-05, "loss": 1.3261, "step": 1690 },
    { "epoch": 2.42, "learning_rate": 1.0774499473129611e-05, "loss": 1.3631, "step": 1700 },
    { "epoch": 2.43, "learning_rate": 1.0511064278187566e-05, "loss": 1.2648, "step": 1710 },
    { "epoch": 2.45, "learning_rate": 1.0247629083245521e-05, "loss": 1.3818, "step": 1720 },
    { "epoch": 2.46, "learning_rate": 9.984193888303478e-06, "loss": 1.3628, "step": 1730 },
    { "epoch": 2.47, "learning_rate": 9.720758693361433e-06, "loss": 1.3429, "step": 1740 },
    { "epoch": 2.49, "learning_rate": 9.457323498419388e-06, "loss": 1.4293, "step": 1750 },
    { "epoch": 2.5, "learning_rate": 9.193888303477345e-06, "loss": 1.2913, "step": 1760 },
    { "epoch": 2.52, "learning_rate": 8.930453108535302e-06, "loss": 1.3782, "step": 1770 },
    { "epoch": 2.53, "learning_rate": 8.667017913593255e-06, "loss": 1.2975, "step": 1780 },
    { "epoch": 2.55, "learning_rate": 8.403582718651212e-06, "loss": 1.3039, "step": 1790 },
    { "epoch": 2.56, "learning_rate": 8.140147523709169e-06, "loss": 1.291, "step": 1800 },
    { "epoch": 2.57, "learning_rate": 7.876712328767124e-06, "loss": 1.3099, "step": 1810 },
    { "epoch": 2.59, "learning_rate": 7.613277133825079e-06, "loss": 1.2919, "step": 1820 },
    { "epoch": 2.6, "learning_rate": 7.349841938883036e-06, "loss": 1.2977, "step": 1830 },
    { "epoch": 2.62, "learning_rate": 7.08640674394099e-06, "loss": 1.3157, "step": 1840 },
    { "epoch": 2.63, "learning_rate": 6.822971548998947e-06, "loss": 1.3704, "step": 1850 },
    { "epoch": 2.64, "learning_rate": 6.559536354056903e-06, "loss": 1.4022, "step": 1860 },
    { "epoch": 2.66, "learning_rate": 6.296101159114858e-06, "loss": 1.3697, "step": 1870 },
    { "epoch": 2.67, "learning_rate": 6.032665964172814e-06, "loss": 1.3926, "step": 1880 },
    { "epoch": 2.69, "learning_rate": 5.76923076923077e-06, "loss": 1.3484, "step": 1890 },
    { "epoch": 2.7, "learning_rate": 5.505795574288726e-06, "loss": 1.3312, "step": 1900 },
    { "epoch": 2.72, "learning_rate": 5.242360379346681e-06, "loss": 1.3344, "step": 1910 },
    { "epoch": 2.73, "learning_rate": 4.978925184404637e-06, "loss": 1.2542, "step": 1920 },
    { "epoch": 2.74, "learning_rate": 4.715489989462593e-06, "loss": 1.31, "step": 1930 },
    { "epoch": 2.76, "learning_rate": 4.452054794520548e-06, "loss": 1.2692, "step": 1940 },
    { "epoch": 2.77, "learning_rate": 4.188619599578504e-06, "loss": 1.3602, "step": 1950 },
    { "epoch": 2.79, "learning_rate": 3.92518440463646e-06, "loss": 1.2637, "step": 1960 },
    { "epoch": 2.8, "learning_rate": 3.661749209694415e-06, "loss": 1.2901, "step": 1970 },
    { "epoch": 2.82, "learning_rate": 3.398314014752371e-06, "loss": 1.2764, "step": 1980 },
    { "epoch": 2.83, "learning_rate": 3.1348788198103265e-06, "loss": 1.3211, "step": 1990 },
    { "epoch": 2.84, "learning_rate": 2.8714436248682825e-06, "loss": 1.3353, "step": 2000 },
    { "epoch": 2.86, "learning_rate": 2.6080084299262384e-06, "loss": 1.2377, "step": 2010 },
    { "epoch": 2.87, "learning_rate": 2.3445732349841943e-06, "loss": 1.3447, "step": 2020 },
    { "epoch": 2.89, "learning_rate": 2.08113804004215e-06, "loss": 1.3155, "step": 2030 },
    { "epoch": 2.9, "learning_rate": 1.8177028451001056e-06, "loss": 1.4133, "step": 2040 },
    { "epoch": 2.92, "learning_rate": 1.554267650158061e-06, "loss": 1.2132, "step": 2050 },
    { "epoch": 2.93, "learning_rate": 1.290832455216017e-06, "loss": 1.3686, "step": 2060 },
    { "epoch": 2.94, "learning_rate": 1.0273972602739725e-06, "loss": 1.341, "step": 2070 },
    { "epoch": 2.96, "learning_rate": 7.639620653319284e-07, "loss": 1.305, "step": 2080 },
    { "epoch": 2.97, "learning_rate": 5.00526870389884e-07, "loss": 1.3064, "step": 2090 },
    { "epoch": 2.99, "learning_rate": 2.3709167544783985e-07, "loss": 1.3005, "step": 2100 },
    { "epoch": 3.0, "eval_accuracy": 0.8289, "eval_loss": 0.6467128992080688, "eval_runtime": 37.586, "eval_samples_per_second": 266.057, "eval_steps_per_second": 8.328, "step": 2109 },
    { "epoch": 3.0, "step": 2109, "total_flos": 6.74624745043329e+18, "train_loss": 1.9375283018340992, "train_runtime": 2464.8383, "train_samples_per_second": 109.541, "train_steps_per_second": 0.856 }
  ],
  "max_steps": 2109,
  "num_train_epochs": 3,
  "total_flos": 6.74624745043329e+18,
  "trial_name": null,
  "trial_params": null
}