|
{
  "best_metric": 0.9676750216076059,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-4060",
  "epoch": 9.981561155500922,
  "eval_steps": 500,
  "global_step": 4060,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 1.2315270935960593e-06,
      "loss": 0.8581,
      "step": 10
    },
    {
      "epoch": 0.05,
      "learning_rate": 2.4630541871921186e-06,
      "loss": 0.7471,
      "step": 20
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.6945812807881777e-06,
      "loss": 0.7525,
      "step": 30
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.926108374384237e-06,
      "loss": 0.7179,
      "step": 40
    },
    {
      "epoch": 0.12,
      "learning_rate": 6.157635467980296e-06,
      "loss": 0.769,
      "step": 50
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.3891625615763555e-06,
      "loss": 0.6952,
      "step": 60
    },
    {
      "epoch": 0.17,
      "learning_rate": 8.620689655172414e-06,
      "loss": 0.8408,
      "step": 70
    },
    {
      "epoch": 0.2,
      "learning_rate": 9.852216748768475e-06,
      "loss": 0.7396,
      "step": 80
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.1083743842364533e-05,
      "loss": 0.8311,
      "step": 90
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.2315270935960592e-05,
      "loss": 0.7802,
      "step": 100
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.354679802955665e-05,
      "loss": 0.7188,
      "step": 110
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.4778325123152711e-05,
      "loss": 0.7815,
      "step": 120
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.6009852216748768e-05,
      "loss": 0.7817,
      "step": 130
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 0.7546,
      "step": 140
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.847290640394089e-05,
      "loss": 0.7266,
      "step": 150
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.970443349753695e-05,
      "loss": 0.7962,
      "step": 160
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.0935960591133006e-05,
      "loss": 0.7822,
      "step": 170
    },
    {
      "epoch": 0.44,
      "learning_rate": 2.2167487684729066e-05,
      "loss": 0.8589,
      "step": 180
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.3399014778325123e-05,
      "loss": 0.7298,
      "step": 190
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.4630541871921184e-05,
      "loss": 0.8412,
      "step": 200
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.5862068965517244e-05,
      "loss": 0.8004,
      "step": 210
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.70935960591133e-05,
      "loss": 0.7673,
      "step": 220
    },
    {
      "epoch": 0.57,
      "learning_rate": 2.8325123152709358e-05,
      "loss": 0.7293,
      "step": 230
    },
    {
      "epoch": 0.59,
      "learning_rate": 2.9556650246305422e-05,
      "loss": 0.8035,
      "step": 240
    },
    {
      "epoch": 0.61,
      "learning_rate": 3.078817733990148e-05,
      "loss": 0.7597,
      "step": 250
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.2019704433497536e-05,
      "loss": 0.8599,
      "step": 260
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.3251231527093596e-05,
      "loss": 0.7831,
      "step": 270
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 0.7714,
      "step": 280
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.8696,
      "step": 290
    },
    {
      "epoch": 0.74,
      "learning_rate": 3.694581280788178e-05,
      "loss": 0.8704,
      "step": 300
    },
    {
      "epoch": 0.76,
      "learning_rate": 3.817733990147783e-05,
      "loss": 0.7956,
      "step": 310
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.94088669950739e-05,
      "loss": 0.758,
      "step": 320
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.064039408866995e-05,
      "loss": 0.7565,
      "step": 330
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.187192118226601e-05,
      "loss": 0.8304,
      "step": 340
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.3103448275862066e-05,
      "loss": 0.8016,
      "step": 350
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.433497536945813e-05,
      "loss": 0.8437,
      "step": 360
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.5566502463054186e-05,
      "loss": 0.8352,
      "step": 370
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.679802955665025e-05,
      "loss": 0.7455,
      "step": 380
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.802955665024631e-05,
      "loss": 0.7252,
      "step": 390
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.926108374384237e-05,
      "loss": 0.7629,
      "step": 400
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9312013828867761,
      "eval_loss": 0.3326094150543213,
      "eval_runtime": 27.2379,
      "eval_samples_per_second": 212.388,
      "eval_steps_per_second": 6.645,
      "step": 406
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.9945265462506846e-05,
      "loss": 0.836,
      "step": 410
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.980842911877395e-05,
      "loss": 0.7864,
      "step": 420
    },
    {
      "epoch": 1.06,
      "learning_rate": 4.9671592775041054e-05,
      "loss": 0.8633,
      "step": 430
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.953475643130816e-05,
      "loss": 0.8069,
      "step": 440
    },
    {
      "epoch": 1.11,
      "learning_rate": 4.939792008757526e-05,
      "loss": 0.7711,
      "step": 450
    },
    {
      "epoch": 1.13,
      "learning_rate": 4.926108374384237e-05,
      "loss": 0.7662,
      "step": 460
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.912424740010947e-05,
      "loss": 0.7585,
      "step": 470
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.8987411056376576e-05,
      "loss": 0.8457,
      "step": 480
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.885057471264368e-05,
      "loss": 0.8136,
      "step": 490
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.8713738368910785e-05,
      "loss": 0.8722,
      "step": 500
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.857690202517789e-05,
      "loss": 0.7583,
      "step": 510
    },
    {
      "epoch": 1.28,
      "learning_rate": 4.8440065681444994e-05,
      "loss": 0.8012,
      "step": 520
    },
    {
      "epoch": 1.3,
      "learning_rate": 4.83032293377121e-05,
      "loss": 0.8165,
      "step": 530
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.81663929939792e-05,
      "loss": 0.7361,
      "step": 540
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.802955665024631e-05,
      "loss": 0.7531,
      "step": 550
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.789272030651341e-05,
      "loss": 0.823,
      "step": 560
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.7755883962780516e-05,
      "loss": 0.8301,
      "step": 570
    },
    {
      "epoch": 1.43,
      "learning_rate": 4.761904761904762e-05,
      "loss": 0.7367,
      "step": 580
    },
    {
      "epoch": 1.45,
      "learning_rate": 4.7482211275314725e-05,
      "loss": 0.8218,
      "step": 590
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.734537493158183e-05,
      "loss": 0.8237,
      "step": 600
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.7208538587848934e-05,
      "loss": 0.8238,
      "step": 610
    },
    {
      "epoch": 1.52,
      "learning_rate": 4.707170224411604e-05,
      "loss": 0.7757,
      "step": 620
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.693486590038315e-05,
      "loss": 0.8156,
      "step": 630
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.679802955665025e-05,
      "loss": 0.8355,
      "step": 640
    },
    {
      "epoch": 1.6,
      "learning_rate": 4.666119321291735e-05,
      "loss": 0.7258,
      "step": 650
    },
    {
      "epoch": 1.62,
      "learning_rate": 4.652435686918446e-05,
      "loss": 0.7613,
      "step": 660
    },
    {
      "epoch": 1.65,
      "learning_rate": 4.638752052545156e-05,
      "loss": 0.7365,
      "step": 670
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.6250684181718664e-05,
      "loss": 0.7489,
      "step": 680
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.611384783798577e-05,
      "loss": 0.781,
      "step": 690
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.597701149425287e-05,
      "loss": 0.8505,
      "step": 700
    },
    {
      "epoch": 1.75,
      "learning_rate": 4.5840175150519984e-05,
      "loss": 0.7674,
      "step": 710
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.570333880678708e-05,
      "loss": 0.8244,
      "step": 720
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.5566502463054186e-05,
      "loss": 0.8095,
      "step": 730
    },
    {
      "epoch": 1.82,
      "learning_rate": 4.54296661193213e-05,
      "loss": 0.8051,
      "step": 740
    },
    {
      "epoch": 1.84,
      "learning_rate": 4.5292829775588395e-05,
      "loss": 0.7743,
      "step": 750
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.5155993431855506e-05,
      "loss": 0.8334,
      "step": 760
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.501915708812261e-05,
      "loss": 0.829,
      "step": 770
    },
    {
      "epoch": 1.92,
      "learning_rate": 4.488232074438971e-05,
      "loss": 0.788,
      "step": 780
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.474548440065682e-05,
      "loss": 0.692,
      "step": 790
    },
    {
      "epoch": 1.97,
      "learning_rate": 4.460864805692392e-05,
      "loss": 0.7329,
      "step": 800
    },
    {
      "epoch": 1.99,
      "learning_rate": 4.447181171319103e-05,
      "loss": 0.8118,
      "step": 810
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9450302506482282,
      "eval_loss": 0.29584750533103943,
      "eval_runtime": 27.0638,
      "eval_samples_per_second": 213.754,
      "eval_steps_per_second": 6.688,
      "step": 813
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.433497536945813e-05,
      "loss": 0.7741,
      "step": 820
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.419813902572523e-05,
      "loss": 0.814,
      "step": 830
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.406130268199234e-05,
      "loss": 0.8301,
      "step": 840
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.3924466338259446e-05,
      "loss": 0.789,
      "step": 850
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.3787629994526544e-05,
      "loss": 0.7549,
      "step": 860
    },
    {
      "epoch": 2.14,
      "learning_rate": 4.3650793650793655e-05,
      "loss": 0.6721,
      "step": 870
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.351395730706076e-05,
      "loss": 0.7396,
      "step": 880
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.3377120963327864e-05,
      "loss": 0.6782,
      "step": 890
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.324028461959497e-05,
      "loss": 0.7289,
      "step": 900
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.3103448275862066e-05,
      "loss": 0.712,
      "step": 910
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.296661193212918e-05,
      "loss": 0.8466,
      "step": 920
    },
    {
      "epoch": 2.29,
      "learning_rate": 4.282977558839628e-05,
      "loss": 0.8502,
      "step": 930
    },
    {
      "epoch": 2.31,
      "learning_rate": 4.2692939244663386e-05,
      "loss": 0.6847,
      "step": 940
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.255610290093049e-05,
      "loss": 0.8097,
      "step": 950
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.2419266557197594e-05,
      "loss": 0.709,
      "step": 960
    },
    {
      "epoch": 2.38,
      "learning_rate": 4.22824302134647e-05,
      "loss": 0.665,
      "step": 970
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.21455938697318e-05,
      "loss": 0.6726,
      "step": 980
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.200875752599891e-05,
      "loss": 0.7391,
      "step": 990
    },
    {
      "epoch": 2.46,
      "learning_rate": 4.187192118226601e-05,
      "loss": 0.7706,
      "step": 1000
    },
    {
      "epoch": 2.48,
      "learning_rate": 4.1735084838533116e-05,
      "loss": 0.8454,
      "step": 1010
    },
    {
      "epoch": 2.51,
      "learning_rate": 4.159824849480022e-05,
      "loss": 0.7661,
      "step": 1020
    },
    {
      "epoch": 2.53,
      "learning_rate": 4.1461412151067325e-05,
      "loss": 0.7454,
      "step": 1030
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.132457580733443e-05,
      "loss": 0.6551,
      "step": 1040
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.1187739463601534e-05,
      "loss": 0.7453,
      "step": 1050
    },
    {
      "epoch": 2.61,
      "learning_rate": 4.105090311986864e-05,
      "loss": 0.7144,
      "step": 1060
    },
    {
      "epoch": 2.63,
      "learning_rate": 4.091406677613574e-05,
      "loss": 0.734,
      "step": 1070
    },
    {
      "epoch": 2.66,
      "learning_rate": 4.077723043240285e-05,
      "loss": 0.7234,
      "step": 1080
    },
    {
      "epoch": 2.68,
      "learning_rate": 4.064039408866995e-05,
      "loss": 0.7665,
      "step": 1090
    },
    {
      "epoch": 2.7,
      "learning_rate": 4.050355774493706e-05,
      "loss": 0.6977,
      "step": 1100
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.036672140120416e-05,
      "loss": 0.7377,
      "step": 1110
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.0229885057471265e-05,
      "loss": 0.7405,
      "step": 1120
    },
    {
      "epoch": 2.78,
      "learning_rate": 4.009304871373837e-05,
      "loss": 0.7858,
      "step": 1130
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.9956212370005474e-05,
      "loss": 0.6955,
      "step": 1140
    },
    {
      "epoch": 2.83,
      "learning_rate": 3.981937602627258e-05,
      "loss": 0.8183,
      "step": 1150
    },
    {
      "epoch": 2.85,
      "learning_rate": 3.968253968253968e-05,
      "loss": 0.7564,
      "step": 1160
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.954570333880679e-05,
      "loss": 0.7048,
      "step": 1170
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.94088669950739e-05,
      "loss": 0.7414,
      "step": 1180
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.9272030651340996e-05,
      "loss": 0.7146,
      "step": 1190
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.91351943076081e-05,
      "loss": 0.7579,
      "step": 1200
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.899835796387521e-05,
      "loss": 0.7648,
      "step": 1210
    },
    {
      "epoch": 3.0,
      "learning_rate": 3.886152162014231e-05,
      "loss": 0.7189,
      "step": 1220
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9515989628349178,
      "eval_loss": 0.25021520256996155,
      "eval_runtime": 28.0749,
      "eval_samples_per_second": 206.056,
      "eval_steps_per_second": 6.447,
      "step": 1220
    },
    {
      "epoch": 3.02,
      "learning_rate": 3.872468527640942e-05,
      "loss": 0.748,
      "step": 1230
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.858784893267652e-05,
      "loss": 0.7554,
      "step": 1240
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.845101258894362e-05,
      "loss": 0.7256,
      "step": 1250
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.831417624521073e-05,
      "loss": 0.6269,
      "step": 1260
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.817733990147783e-05,
      "loss": 0.6848,
      "step": 1270
    },
    {
      "epoch": 3.15,
      "learning_rate": 3.8040503557744935e-05,
      "loss": 0.7929,
      "step": 1280
    },
    {
      "epoch": 3.17,
      "learning_rate": 3.7903667214012047e-05,
      "loss": 0.7091,
      "step": 1290
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.7766830870279144e-05,
      "loss": 0.7314,
      "step": 1300
    },
    {
      "epoch": 3.22,
      "learning_rate": 3.7629994526546255e-05,
      "loss": 0.6781,
      "step": 1310
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.749315818281336e-05,
      "loss": 0.6571,
      "step": 1320
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.735632183908046e-05,
      "loss": 0.6719,
      "step": 1330
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.721948549534757e-05,
      "loss": 0.7128,
      "step": 1340
    },
    {
      "epoch": 3.32,
      "learning_rate": 3.7082649151614666e-05,
      "loss": 0.6949,
      "step": 1350
    },
    {
      "epoch": 3.34,
      "learning_rate": 3.694581280788178e-05,
      "loss": 0.6583,
      "step": 1360
    },
    {
      "epoch": 3.37,
      "learning_rate": 3.680897646414888e-05,
      "loss": 0.7373,
      "step": 1370
    },
    {
      "epoch": 3.39,
      "learning_rate": 3.667214012041598e-05,
      "loss": 0.707,
      "step": 1380
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.653530377668309e-05,
      "loss": 0.715,
      "step": 1390
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.6398467432950195e-05,
      "loss": 0.6595,
      "step": 1400
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.62616310892173e-05,
      "loss": 0.6768,
      "step": 1410
    },
    {
      "epoch": 3.49,
      "learning_rate": 3.6124794745484404e-05,
      "loss": 0.6416,
      "step": 1420
    },
    {
      "epoch": 3.52,
      "learning_rate": 3.598795840175151e-05,
      "loss": 0.8161,
      "step": 1430
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.585112205801861e-05,
      "loss": 0.7023,
      "step": 1440
    },
    {
      "epoch": 3.56,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.6853,
      "step": 1450
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.5577449370552815e-05,
      "loss": 0.7589,
      "step": 1460
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.5440613026819926e-05,
      "loss": 0.6967,
      "step": 1470
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.530377668308703e-05,
      "loss": 0.7275,
      "step": 1480
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.5166940339354135e-05,
      "loss": 0.7026,
      "step": 1490
    },
    {
      "epoch": 3.69,
      "learning_rate": 3.503010399562124e-05,
      "loss": 0.6894,
      "step": 1500
    },
    {
      "epoch": 3.71,
      "learning_rate": 3.489326765188834e-05,
      "loss": 0.6976,
      "step": 1510
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.475643130815545e-05,
      "loss": 0.6837,
      "step": 1520
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.461959496442255e-05,
      "loss": 0.6921,
      "step": 1530
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.4482758620689657e-05,
      "loss": 0.5885,
      "step": 1540
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.434592227695676e-05,
      "loss": 0.7013,
      "step": 1550
    },
    {
      "epoch": 3.84,
      "learning_rate": 3.4209085933223865e-05,
      "loss": 0.6954,
      "step": 1560
    },
    {
      "epoch": 3.86,
      "learning_rate": 3.407224958949097e-05,
      "loss": 0.7609,
      "step": 1570
    },
    {
      "epoch": 3.88,
      "learning_rate": 3.3935413245758074e-05,
      "loss": 0.7052,
      "step": 1580
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.379857690202518e-05,
      "loss": 0.6929,
      "step": 1590
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.366174055829228e-05,
      "loss": 0.6814,
      "step": 1600
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.352490421455939e-05,
      "loss": 0.7084,
      "step": 1610
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.338806787082649e-05,
      "loss": 0.7529,
      "step": 1620
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9566119273984443,
      "eval_loss": 0.2300640344619751,
      "eval_runtime": 27.261,
      "eval_samples_per_second": 212.208,
      "eval_steps_per_second": 6.64,
      "step": 1627
    },
    {
      "epoch": 4.01,
      "learning_rate": 3.3251231527093596e-05,
      "loss": 0.6286,
      "step": 1630
    },
    {
      "epoch": 4.03,
      "learning_rate": 3.31143951833607e-05,
      "loss": 0.7495,
      "step": 1640
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.297755883962781e-05,
      "loss": 0.7085,
      "step": 1650
    },
    {
      "epoch": 4.08,
      "learning_rate": 3.284072249589491e-05,
      "loss": 0.72,
      "step": 1660
    },
    {
      "epoch": 4.11,
      "learning_rate": 3.2703886152162014e-05,
      "loss": 0.6716,
      "step": 1670
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.256704980842912e-05,
      "loss": 0.641,
      "step": 1680
    },
    {
      "epoch": 4.15,
      "learning_rate": 3.243021346469622e-05,
      "loss": 0.6338,
      "step": 1690
    },
    {
      "epoch": 4.18,
      "learning_rate": 3.2293377120963334e-05,
      "loss": 0.5716,
      "step": 1700
    },
    {
      "epoch": 4.2,
      "learning_rate": 3.215654077723043e-05,
      "loss": 0.6536,
      "step": 1710
    },
    {
      "epoch": 4.23,
      "learning_rate": 3.2019704433497536e-05,
      "loss": 0.6454,
      "step": 1720
    },
    {
      "epoch": 4.25,
      "learning_rate": 3.188286808976465e-05,
      "loss": 0.6392,
      "step": 1730
    },
    {
      "epoch": 4.28,
      "learning_rate": 3.1746031746031745e-05,
      "loss": 0.6275,
      "step": 1740
    },
    {
      "epoch": 4.3,
      "learning_rate": 3.160919540229885e-05,
      "loss": 0.6634,
      "step": 1750
    },
    {
      "epoch": 4.33,
      "learning_rate": 3.147235905856596e-05,
      "loss": 0.6603,
      "step": 1760
    },
    {
      "epoch": 4.35,
      "learning_rate": 3.133552271483306e-05,
      "loss": 0.704,
      "step": 1770
    },
    {
      "epoch": 4.38,
      "learning_rate": 3.119868637110017e-05,
      "loss": 0.7157,
      "step": 1780
    },
    {
      "epoch": 4.4,
      "learning_rate": 3.1061850027367273e-05,
      "loss": 0.6142,
      "step": 1790
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.092501368363437e-05,
      "loss": 0.5772,
      "step": 1800
    },
    {
      "epoch": 4.45,
      "learning_rate": 3.078817733990148e-05,
      "loss": 0.6924,
      "step": 1810
    },
    {
      "epoch": 4.47,
      "learning_rate": 3.065134099616858e-05,
      "loss": 0.6931,
      "step": 1820
    },
    {
      "epoch": 4.5,
      "learning_rate": 3.0514504652435688e-05,
      "loss": 0.6155,
      "step": 1830
    },
    {
      "epoch": 4.52,
      "learning_rate": 3.0377668308702795e-05,
      "loss": 0.6732,
      "step": 1840
    },
    {
      "epoch": 4.55,
      "learning_rate": 3.0240831964969896e-05,
      "loss": 0.6537,
      "step": 1850
    },
    {
      "epoch": 4.57,
      "learning_rate": 3.0103995621237e-05,
      "loss": 0.6704,
      "step": 1860
    },
    {
      "epoch": 4.6,
      "learning_rate": 2.996715927750411e-05,
      "loss": 0.6187,
      "step": 1870
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.983032293377121e-05,
      "loss": 0.6287,
      "step": 1880
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.9693486590038317e-05,
      "loss": 0.5647,
      "step": 1890
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.9556650246305422e-05,
      "loss": 0.685,
      "step": 1900
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.9419813902572523e-05,
      "loss": 0.5978,
      "step": 1910
    },
    {
      "epoch": 4.72,
      "learning_rate": 2.928297755883963e-05,
      "loss": 0.608,
      "step": 1920
    },
    {
      "epoch": 4.74,
      "learning_rate": 2.914614121510673e-05,
      "loss": 0.6599,
      "step": 1930
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.900930487137384e-05,
      "loss": 0.6674,
      "step": 1940
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.8872468527640944e-05,
      "loss": 0.6663,
      "step": 1950
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.8735632183908045e-05,
      "loss": 0.6524,
      "step": 1960
    },
    {
      "epoch": 4.84,
      "learning_rate": 2.8598795840175153e-05,
      "loss": 0.6612,
      "step": 1970
    },
    {
      "epoch": 4.87,
      "learning_rate": 2.8461959496442257e-05,
      "loss": 0.7027,
      "step": 1980
    },
    {
      "epoch": 4.89,
      "learning_rate": 2.8325123152709358e-05,
      "loss": 0.6129,
      "step": 1990
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.8188286808976466e-05,
      "loss": 0.7099,
      "step": 2000
    },
    {
      "epoch": 4.94,
      "learning_rate": 2.8051450465243574e-05,
      "loss": 0.5781,
      "step": 2010
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.7914614121510675e-05,
      "loss": 0.6978,
      "step": 2020
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.6746,
      "step": 2030
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.957476231633535,
      "eval_loss": 0.21456871926784515,
      "eval_runtime": 27.8285,
      "eval_samples_per_second": 207.881,
      "eval_steps_per_second": 6.504,
      "step": 2033
    },
    {
      "epoch": 5.02,
      "learning_rate": 2.764094143404488e-05,
      "loss": 0.6939,
      "step": 2040
    },
    {
      "epoch": 5.04,
      "learning_rate": 2.7504105090311988e-05,
      "loss": 0.5992,
      "step": 2050
    },
    {
      "epoch": 5.06,
      "learning_rate": 2.7367268746579096e-05,
      "loss": 0.6252,
      "step": 2060
    },
    {
      "epoch": 5.09,
      "learning_rate": 2.7230432402846197e-05,
      "loss": 0.6173,
      "step": 2070
    },
    {
      "epoch": 5.11,
      "learning_rate": 2.70935960591133e-05,
      "loss": 0.6292,
      "step": 2080
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.695675971538041e-05,
      "loss": 0.6444,
      "step": 2090
    },
    {
      "epoch": 5.16,
      "learning_rate": 2.681992337164751e-05,
      "loss": 0.6745,
      "step": 2100
    },
    {
      "epoch": 5.19,
      "learning_rate": 2.6683087027914618e-05,
      "loss": 0.6157,
      "step": 2110
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.6546250684181722e-05,
      "loss": 0.5988,
      "step": 2120
    },
    {
      "epoch": 5.24,
      "learning_rate": 2.6409414340448823e-05,
      "loss": 0.6707,
      "step": 2130
    },
    {
      "epoch": 5.26,
      "learning_rate": 2.627257799671593e-05,
      "loss": 0.5951,
      "step": 2140
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.6135741652983032e-05,
      "loss": 0.5863,
      "step": 2150
    },
    {
      "epoch": 5.31,
      "learning_rate": 2.5998905309250136e-05,
      "loss": 0.6182,
      "step": 2160
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.5862068965517244e-05,
      "loss": 0.6163,
      "step": 2170
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.5725232621784345e-05,
      "loss": 0.6525,
      "step": 2180
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.5588396278051453e-05,
      "loss": 0.6545,
      "step": 2190
    },
    {
      "epoch": 5.41,
      "learning_rate": 2.5451559934318557e-05,
      "loss": 0.6883,
      "step": 2200
    },
    {
      "epoch": 5.43,
      "learning_rate": 2.531472359058566e-05,
      "loss": 0.6644,
      "step": 2210
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.5177887246852766e-05,
      "loss": 0.641,
      "step": 2220
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.5041050903119874e-05,
      "loss": 0.6029,
      "step": 2230
    },
    {
      "epoch": 5.51,
      "learning_rate": 2.4904214559386975e-05,
      "loss": 0.6049,
      "step": 2240
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.476737821565408e-05,
      "loss": 0.6282,
      "step": 2250
    },
    {
      "epoch": 5.56,
      "learning_rate": 2.4630541871921184e-05,
      "loss": 0.5836,
      "step": 2260
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.4493705528188288e-05,
      "loss": 0.6288,
      "step": 2270
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.4356869184455393e-05,
      "loss": 0.5648,
      "step": 2280
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.4220032840722497e-05,
      "loss": 0.5689,
      "step": 2290
    },
    {
      "epoch": 5.65,
      "learning_rate": 2.40831964969896e-05,
      "loss": 0.6038,
      "step": 2300
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.3946360153256706e-05,
      "loss": 0.5866,
      "step": 2310
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.380952380952381e-05,
      "loss": 0.6283,
      "step": 2320
    },
    {
      "epoch": 5.73,
      "learning_rate": 2.3672687465790915e-05,
      "loss": 0.6445,
      "step": 2330
    },
    {
      "epoch": 5.75,
      "learning_rate": 2.353585112205802e-05,
      "loss": 0.6545,
      "step": 2340
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.3399014778325123e-05,
      "loss": 0.6394,
      "step": 2350
    },
    {
      "epoch": 5.8,
      "learning_rate": 2.326217843459223e-05,
      "loss": 0.5928,
      "step": 2360
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.3125342090859332e-05,
      "loss": 0.6121,
      "step": 2370
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.2988505747126437e-05,
      "loss": 0.6055,
      "step": 2380
    },
    {
      "epoch": 5.88,
      "learning_rate": 2.285166940339354e-05,
      "loss": 0.5902,
      "step": 2390
    },
    {
      "epoch": 5.9,
      "learning_rate": 2.271483305966065e-05,
      "loss": 0.5764,
      "step": 2400
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.2577996715927753e-05,
      "loss": 0.5722,
      "step": 2410
    },
    {
      "epoch": 5.95,
      "learning_rate": 2.2441160372194854e-05,
      "loss": 0.5832,
      "step": 2420
    },
    {
      "epoch": 5.97,
      "learning_rate": 2.230432402846196e-05,
      "loss": 0.5632,
      "step": 2430
    },
    {
      "epoch": 6.0,
      "learning_rate": 2.2167487684729066e-05,
      "loss": 0.546,
      "step": 2440
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.9609334485738981,
      "eval_loss": 0.2027408331632614,
      "eval_runtime": 27.561,
      "eval_samples_per_second": 209.898,
      "eval_steps_per_second": 6.567,
      "step": 2440
    },
    {
      "epoch": 6.02,
      "learning_rate": 2.203065134099617e-05,
      "loss": 0.5745,
      "step": 2450
    },
    {
      "epoch": 6.05,
      "learning_rate": 2.1893814997263272e-05,
      "loss": 0.5959,
      "step": 2460
    },
    {
      "epoch": 6.07,
      "learning_rate": 2.175697865353038e-05,
      "loss": 0.6355,
      "step": 2470
    },
    {
      "epoch": 6.1,
      "learning_rate": 2.1620142309797484e-05,
      "loss": 0.5944,
      "step": 2480
    },
    {
      "epoch": 6.12,
      "learning_rate": 2.148330596606459e-05,
      "loss": 0.5942,
      "step": 2490
    },
    {
      "epoch": 6.15,
      "learning_rate": 2.1346469622331693e-05,
      "loss": 0.5985,
      "step": 2500
    },
    {
      "epoch": 6.17,
      "learning_rate": 2.1209633278598797e-05,
      "loss": 0.5785,
      "step": 2510
    },
    {
      "epoch": 6.2,
      "learning_rate": 2.10727969348659e-05,
      "loss": 0.5717,
      "step": 2520
    },
    {
      "epoch": 6.22,
      "learning_rate": 2.0935960591133006e-05,
      "loss": 0.6025,
      "step": 2530
    },
    {
      "epoch": 6.24,
      "learning_rate": 2.079912424740011e-05,
      "loss": 0.5668,
      "step": 2540
    },
    {
      "epoch": 6.27,
      "learning_rate": 2.0662287903667215e-05,
      "loss": 0.5703,
      "step": 2550
    },
    {
      "epoch": 6.29,
      "learning_rate": 2.052545155993432e-05,
      "loss": 0.6261,
      "step": 2560
    },
    {
      "epoch": 6.32,
      "learning_rate": 2.0388615216201424e-05,
      "loss": 0.4868,
      "step": 2570
    },
    {
      "epoch": 6.34,
      "learning_rate": 2.025177887246853e-05,
      "loss": 0.6238,
      "step": 2580
    },
    {
      "epoch": 6.37,
      "learning_rate": 2.0114942528735632e-05,
      "loss": 0.5533,
      "step": 2590
    },
    {
      "epoch": 6.39,
      "learning_rate": 1.9978106185002737e-05,
      "loss": 0.5589,
      "step": 2600
    },
    {
      "epoch": 6.42,
      "learning_rate": 1.984126984126984e-05,
      "loss": 0.6414,
      "step": 2610
    },
    {
      "epoch": 6.44,
      "learning_rate": 1.970443349753695e-05,
      "loss": 0.5837,
      "step": 2620
    },
    {
      "epoch": 6.47,
      "learning_rate": 1.956759715380405e-05,
      "loss": 0.5744,
      "step": 2630
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.9430760810071154e-05,
      "loss": 0.5979,
      "step": 2640
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.929392446633826e-05,
      "loss": 0.5642,
      "step": 2650
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.9157088122605367e-05,
      "loss": 0.508,
      "step": 2660
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.9020251778872468e-05,
      "loss": 0.6201,
      "step": 2670
    },
    {
      "epoch": 6.59,
      "learning_rate": 1.8883415435139572e-05,
      "loss": 0.6078,
      "step": 2680
    },
    {
      "epoch": 6.61,
      "learning_rate": 1.874657909140668e-05,
      "loss": 0.6139,
      "step": 2690
    },
    {
      "epoch": 6.64,
      "learning_rate": 1.8609742747673784e-05,
      "loss": 0.5885,
      "step": 2700
    },
    {
      "epoch": 6.66,
      "learning_rate": 1.847290640394089e-05,
      "loss": 0.5809,
      "step": 2710
    },
    {
      "epoch": 6.69,
      "learning_rate": 1.833607006020799e-05,
      "loss": 0.5503,
      "step": 2720
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.8199233716475097e-05,
      "loss": 0.5675,
      "step": 2730
    },
    {
      "epoch": 6.74,
      "learning_rate": 1.8062397372742202e-05,
      "loss": 0.554,
      "step": 2740
    },
    {
      "epoch": 6.76,
      "learning_rate": 1.7925561029009306e-05,
      "loss": 0.6173,
      "step": 2750
    },
    {
      "epoch": 6.79,
      "learning_rate": 1.7788724685276407e-05,
      "loss": 0.5588,
      "step": 2760
    },
    {
      "epoch": 6.81,
      "learning_rate": 1.7651888341543515e-05,
      "loss": 0.5736,
      "step": 2770
    },
    {
      "epoch": 6.83,
      "learning_rate": 1.751505199781062e-05,
      "loss": 0.5748,
      "step": 2780
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.7378215654077724e-05,
      "loss": 0.6228,
      "step": 2790
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.7241379310344828e-05,
      "loss": 0.635,
      "step": 2800
    },
    {
      "epoch": 6.91,
      "learning_rate": 1.7104542966611933e-05,
      "loss": 0.5662,
      "step": 2810
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.6967706622879037e-05,
      "loss": 0.6354,
      "step": 2820
    },
    {
      "epoch": 6.96,
      "learning_rate": 1.683087027914614e-05,
      "loss": 0.5635,
      "step": 2830
    },
    {
      "epoch": 6.98,
      "learning_rate": 1.6694033935413246e-05,
      "loss": 0.5983,
      "step": 2840
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.9640449438202248,
      "eval_loss": 0.19191542267799377,
      "eval_runtime": 27.0395,
      "eval_samples_per_second": 213.946,
      "eval_steps_per_second": 6.694,
      "step": 2847
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.655719759168035e-05,
      "loss": 0.5661,
      "step": 2850
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.6420361247947455e-05,
      "loss": 0.5409,
      "step": 2860
    },
    {
      "epoch": 7.06,
      "learning_rate": 1.628352490421456e-05,
      "loss": 0.5863,
      "step": 2870
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.6146688560481667e-05,
      "loss": 0.5587,
      "step": 2880
    },
    {
      "epoch": 7.11,
      "learning_rate": 1.6009852216748768e-05,
      "loss": 0.5921,
      "step": 2890
    },
    {
      "epoch": 7.13,
      "learning_rate": 1.5873015873015872e-05,
      "loss": 0.5591,
      "step": 2900
    },
    {
      "epoch": 7.15,
      "learning_rate": 1.573617952928298e-05,
      "loss": 0.6061,
      "step": 2910
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.5599343185550085e-05,
      "loss": 0.5408,
      "step": 2920
    },
    {
      "epoch": 7.2,
      "learning_rate": 1.5462506841817186e-05,
      "loss": 0.5585,
      "step": 2930
    },
    {
      "epoch": 7.23,
      "learning_rate": 1.532567049808429e-05,
      "loss": 0.5551,
      "step": 2940
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.5188834154351398e-05,
      "loss": 0.5976,
      "step": 2950
    },
    {
      "epoch": 7.28,
      "learning_rate": 1.50519978106185e-05,
      "loss": 0.5359,
      "step": 2960
    },
    {
      "epoch": 7.3,
      "learning_rate": 1.4915161466885605e-05,
      "loss": 0.5213,
      "step": 2970
    },
    {
      "epoch": 7.33,
      "learning_rate": 1.4778325123152711e-05,
      "loss": 0.552,
      "step": 2980
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.4641488779419815e-05,
      "loss": 0.5372,
      "step": 2990
    },
    {
      "epoch": 7.38,
      "learning_rate": 1.450465243568692e-05,
      "loss": 0.61,
      "step": 3000
    },
    {
      "epoch": 7.4,
      "learning_rate": 1.4367816091954022e-05,
      "loss": 0.5742,
      "step": 3010
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.4230979748221129e-05,
      "loss": 0.5856,
      "step": 3020
    },
    {
      "epoch": 7.45,
      "learning_rate": 1.4094143404488233e-05,
      "loss": 0.5193,
      "step": 3030
    },
    {
      "epoch": 7.47,
      "learning_rate": 1.3957307060755337e-05,
      "loss": 0.5345,
      "step": 3040
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.382047071702244e-05,
      "loss": 0.5558,
      "step": 3050
    },
    {
      "epoch": 7.52,
      "learning_rate": 1.3683634373289548e-05,
      "loss": 0.58,
      "step": 3060
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.354679802955665e-05,
      "loss": 0.5425,
      "step": 3070
    },
    {
      "epoch": 7.57,
      "learning_rate": 1.3409961685823755e-05,
      "loss": 0.6588,
      "step": 3080
    },
    {
      "epoch": 7.6,
      "learning_rate": 1.3273125342090861e-05,
      "loss": 0.5137,
      "step": 3090
    },
    {
      "epoch": 7.62,
      "learning_rate": 1.3136288998357965e-05,
      "loss": 0.5975,
      "step": 3100
    },
    {
      "epoch": 7.65,
      "learning_rate": 1.2999452654625068e-05,
      "loss": 0.5728,
      "step": 3110
    },
    {
      "epoch": 7.67,
      "learning_rate": 1.2862616310892173e-05,
      "loss": 0.5112,
      "step": 3120
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.2725779967159279e-05,
      "loss": 0.5435,
      "step": 3130
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.2588943623426383e-05,
      "loss": 0.5792,
      "step": 3140
    },
    {
      "epoch": 7.74,
      "learning_rate": 1.2452107279693487e-05,
      "loss": 0.5114,
      "step": 3150
    },
    {
      "epoch": 7.77,
      "learning_rate": 1.2315270935960592e-05,
      "loss": 0.5976,
      "step": 3160
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.2178434592227696e-05,
      "loss": 0.5955,
      "step": 3170
    },
    {
      "epoch": 7.82,
      "learning_rate": 1.20415982484948e-05,
      "loss": 0.5476,
      "step": 3180
    },
    {
      "epoch": 7.84,
      "learning_rate": 1.1904761904761905e-05,
      "loss": 0.5329,
      "step": 3190
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.176792556102901e-05,
      "loss": 0.5765,
      "step": 3200
    },
    {
      "epoch": 7.89,
      "learning_rate": 1.1631089217296116e-05,
      "loss": 0.6101,
      "step": 3210
    },
    {
      "epoch": 7.92,
      "learning_rate": 1.1494252873563218e-05,
      "loss": 0.673,
      "step": 3220
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.1357416529830324e-05,
      "loss": 0.4893,
      "step": 3230
    },
    {
      "epoch": 7.97,
      "learning_rate": 1.1220580186097427e-05,
      "loss": 0.4964,
      "step": 3240
    },
    {
      "epoch": 7.99,
      "learning_rate": 1.1083743842364533e-05,
      "loss": 0.5653,
      "step": 3250
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.9652549697493518,
      "eval_loss": 0.18624082207679749,
      "eval_runtime": 27.8689,
      "eval_samples_per_second": 207.579,
      "eval_steps_per_second": 6.495,
      "step": 3254
    },
    {
      "epoch": 8.01,
      "learning_rate": 1.0946907498631636e-05,
      "loss": 0.5571,
      "step": 3260
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.0810071154898742e-05,
      "loss": 0.5585,
      "step": 3270
    },
    {
      "epoch": 8.06,
      "learning_rate": 1.0673234811165846e-05,
      "loss": 0.5709,
      "step": 3280
    },
    {
      "epoch": 8.09,
      "learning_rate": 1.053639846743295e-05,
      "loss": 0.5892,
      "step": 3290
    },
    {
      "epoch": 8.11,
      "learning_rate": 1.0399562123700055e-05,
      "loss": 0.5463,
      "step": 3300
    },
    {
      "epoch": 8.14,
      "learning_rate": 1.026272577996716e-05,
      "loss": 0.5466,
      "step": 3310
    },
    {
      "epoch": 8.16,
      "learning_rate": 1.0125889436234266e-05,
      "loss": 0.5939,
      "step": 3320
    },
    {
      "epoch": 8.19,
      "learning_rate": 9.989053092501368e-06,
      "loss": 0.5314,
      "step": 3330
    },
    {
      "epoch": 8.21,
      "learning_rate": 9.852216748768475e-06,
      "loss": 0.4986,
      "step": 3340
    },
    {
      "epoch": 8.24,
      "learning_rate": 9.715380405035577e-06,
      "loss": 0.622,
      "step": 3350
    },
    {
      "epoch": 8.26,
      "learning_rate": 9.578544061302683e-06,
      "loss": 0.5836,
      "step": 3360
    },
    {
      "epoch": 8.29,
      "learning_rate": 9.441707717569786e-06,
      "loss": 0.5582,
      "step": 3370
    },
    {
      "epoch": 8.31,
      "learning_rate": 9.304871373836892e-06,
      "loss": 0.5248,
      "step": 3380
    },
    {
      "epoch": 8.33,
      "learning_rate": 9.168035030103995e-06,
      "loss": 0.5676,
      "step": 3390
    },
    {
      "epoch": 8.36,
      "learning_rate": 9.031198686371101e-06,
      "loss": 0.5659,
      "step": 3400
    },
    {
      "epoch": 8.38,
      "learning_rate": 8.894362342638204e-06,
      "loss": 0.5762,
      "step": 3410
    },
    {
      "epoch": 8.41,
      "learning_rate": 8.75752599890531e-06,
      "loss": 0.5365,
      "step": 3420
    },
    {
      "epoch": 8.43,
      "learning_rate": 8.620689655172414e-06,
      "loss": 0.5394,
      "step": 3430
    },
    {
      "epoch": 8.46,
      "learning_rate": 8.483853311439519e-06,
      "loss": 0.5327,
      "step": 3440
    },
    {
      "epoch": 8.48,
      "learning_rate": 8.347016967706623e-06,
      "loss": 0.5503,
      "step": 3450
    },
    {
      "epoch": 8.51,
      "learning_rate": 8.210180623973727e-06,
      "loss": 0.4698,
      "step": 3460
    },
    {
      "epoch": 8.53,
      "learning_rate": 8.073344280240833e-06,
      "loss": 0.5264,
      "step": 3470
    },
    {
      "epoch": 8.56,
      "learning_rate": 7.936507936507936e-06,
      "loss": 0.5127,
      "step": 3480
    },
    {
      "epoch": 8.58,
      "learning_rate": 7.799671592775042e-06,
      "loss": 0.5655,
      "step": 3490
    },
    {
      "epoch": 8.6,
      "learning_rate": 7.662835249042145e-06,
      "loss": 0.5611,
      "step": 3500
    },
    {
      "epoch": 8.63,
      "learning_rate": 7.52599890530925e-06,
      "loss": 0.5306,
      "step": 3510
    },
    {
      "epoch": 8.65,
      "learning_rate": 7.3891625615763555e-06,
      "loss": 0.531,
      "step": 3520
    },
    {
      "epoch": 8.68,
      "learning_rate": 7.25232621784346e-06,
      "loss": 0.5006,
      "step": 3530
    },
    {
      "epoch": 8.7,
      "learning_rate": 7.115489874110564e-06,
      "loss": 0.5036,
      "step": 3540
    },
    {
      "epoch": 8.73,
      "learning_rate": 6.978653530377669e-06,
      "loss": 0.5519,
      "step": 3550
    },
    {
      "epoch": 8.75,
      "learning_rate": 6.841817186644774e-06,
      "loss": 0.5233,
      "step": 3560
    },
    {
      "epoch": 8.78,
      "learning_rate": 6.7049808429118775e-06,
      "loss": 0.567,
      "step": 3570
    },
    {
      "epoch": 8.8,
      "learning_rate": 6.568144499178983e-06,
      "loss": 0.4994,
      "step": 3580
    },
    {
      "epoch": 8.83,
      "learning_rate": 6.431308155446086e-06,
      "loss": 0.5869,
      "step": 3590
    },
    {
      "epoch": 8.85,
      "learning_rate": 6.2944718117131915e-06,
      "loss": 0.5738,
      "step": 3600
    },
    {
      "epoch": 8.88,
      "learning_rate": 6.157635467980296e-06,
      "loss": 0.4842,
      "step": 3610
    },
    {
      "epoch": 8.9,
      "learning_rate": 6.0207991242474e-06,
      "loss": 0.5135,
      "step": 3620
    },
    {
      "epoch": 8.92,
      "learning_rate": 5.883962780514505e-06,
      "loss": 0.5603,
      "step": 3630
    },
    {
      "epoch": 8.95,
      "learning_rate": 5.747126436781609e-06,
      "loss": 0.5068,
      "step": 3640
    },
    {
      "epoch": 8.97,
      "learning_rate": 5.6102900930487136e-06,
      "loss": 0.5566,
      "step": 3650
    },
    {
      "epoch": 9.0,
      "learning_rate": 5.473453749315818e-06,
      "loss": 0.5361,
      "step": 3660
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.9659464131374244,
      "eval_loss": 0.18145236372947693,
      "eval_runtime": 28.6712,
      "eval_samples_per_second": 201.771,
      "eval_steps_per_second": 6.313,
      "step": 3660
    },
    {
      "epoch": 9.02,
      "learning_rate": 5.336617405582923e-06,
      "loss": 0.5322,
      "step": 3670
    },
    {
      "epoch": 9.05,
      "learning_rate": 5.199781061850028e-06,
      "loss": 0.5509,
      "step": 3680
    },
    {
      "epoch": 9.07,
      "learning_rate": 5.062944718117133e-06,
      "loss": 0.5806,
      "step": 3690
    },
    {
      "epoch": 9.1,
      "learning_rate": 4.926108374384237e-06,
      "loss": 0.4942,
      "step": 3700
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.789272030651342e-06,
      "loss": 0.5086,
      "step": 3710
    },
    {
      "epoch": 9.15,
      "learning_rate": 4.652435686918446e-06,
      "loss": 0.5668,
      "step": 3720
    },
    {
      "epoch": 9.17,
      "learning_rate": 4.5155993431855505e-06,
      "loss": 0.5554,
      "step": 3730
    },
    {
      "epoch": 9.19,
      "learning_rate": 4.378762999452655e-06,
      "loss": 0.5122,
      "step": 3740
    },
    {
      "epoch": 9.22,
      "learning_rate": 4.241926655719759e-06,
      "loss": 0.5301,
      "step": 3750
    },
    {
      "epoch": 9.24,
      "learning_rate": 4.105090311986864e-06,
      "loss": 0.5617,
      "step": 3760
    },
    {
      "epoch": 9.27,
      "learning_rate": 3.968253968253968e-06,
      "loss": 0.5423,
      "step": 3770
    },
    {
      "epoch": 9.29,
      "learning_rate": 3.8314176245210725e-06,
      "loss": 0.6171,
      "step": 3780
    },
    {
      "epoch": 9.32,
      "learning_rate": 3.6945812807881777e-06,
      "loss": 0.498,
      "step": 3790
    },
    {
      "epoch": 9.34,
      "learning_rate": 3.557744937055282e-06,
      "loss": 0.5354,
      "step": 3800
    },
    {
      "epoch": 9.37,
      "learning_rate": 3.420908593322387e-06,
      "loss": 0.5846,
      "step": 3810
    },
    {
      "epoch": 9.39,
      "learning_rate": 3.2840722495894914e-06,
      "loss": 0.502,
      "step": 3820
    },
    {
      "epoch": 9.42,
      "learning_rate": 3.1472359058565958e-06,
      "loss": 0.5138,
      "step": 3830
    },
    {
      "epoch": 9.44,
      "learning_rate": 3.0103995621237e-06,
      "loss": 0.5014,
      "step": 3840
    },
    {
      "epoch": 9.47,
      "learning_rate": 2.8735632183908046e-06,
      "loss": 0.4884,
      "step": 3850
    },
    {
      "epoch": 9.49,
      "learning_rate": 2.736726874657909e-06,
      "loss": 0.6063,
      "step": 3860
    },
    {
      "epoch": 9.51,
      "learning_rate": 2.599890530925014e-06,
      "loss": 0.5386,
      "step": 3870
    },
    {
      "epoch": 9.54,
      "learning_rate": 2.4630541871921186e-06,
      "loss": 0.5255,
      "step": 3880
    },
    {
      "epoch": 9.56,
      "learning_rate": 2.326217843459223e-06,
      "loss": 0.5019,
      "step": 3890
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.1893814997263274e-06,
      "loss": 0.503,
      "step": 3900
    },
    {
      "epoch": 9.61,
      "learning_rate": 2.052545155993432e-06,
      "loss": 0.5699,
      "step": 3910
    },
    {
      "epoch": 9.64,
      "learning_rate": 1.9157088122605362e-06,
      "loss": 0.4565,
      "step": 3920
    },
    {
      "epoch": 9.66,
      "learning_rate": 1.778872468527641e-06,
      "loss": 0.5098,
      "step": 3930
    },
    {
      "epoch": 9.69,
      "learning_rate": 1.6420361247947457e-06,
      "loss": 0.5032,
      "step": 3940
    },
    {
      "epoch": 9.71,
      "learning_rate": 1.50519978106185e-06,
      "loss": 0.4713,
      "step": 3950
    },
    {
      "epoch": 9.74,
      "learning_rate": 1.3683634373289545e-06,
      "loss": 0.4554,
      "step": 3960
    },
    {
      "epoch": 9.76,
      "learning_rate": 1.2315270935960593e-06,
      "loss": 0.4773,
      "step": 3970
    },
    {
      "epoch": 9.78,
      "learning_rate": 1.0946907498631637e-06,
      "loss": 0.5536,
      "step": 3980
    },
    {
      "epoch": 9.81,
      "learning_rate": 9.578544061302681e-07,
      "loss": 0.4913,
      "step": 3990
    },
    {
      "epoch": 9.83,
      "learning_rate": 8.210180623973728e-07,
      "loss": 0.5538,
      "step": 4000
    },
    {
      "epoch": 9.86,
      "learning_rate": 6.841817186644772e-07,
      "loss": 0.4838,
      "step": 4010
    },
    {
      "epoch": 9.88,
      "learning_rate": 5.473453749315819e-07,
      "loss": 0.569,
      "step": 4020
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.105090311986864e-07,
      "loss": 0.5847,
      "step": 4030
    },
    {
      "epoch": 9.93,
      "learning_rate": 2.7367268746579093e-07,
      "loss": 0.5507,
      "step": 4040
    },
    {
      "epoch": 9.96,
      "learning_rate": 1.3683634373289546e-07,
      "loss": 0.5834,
      "step": 4050
    },
    {
      "epoch": 9.98,
      "learning_rate": 0.0,
      "loss": 0.5017,
      "step": 4060
    },
    {
      "epoch": 9.98,
      "eval_accuracy": 0.9676750216076059,
      "eval_loss": 0.17719660699367523,
      "eval_runtime": 28.9267,
      "eval_samples_per_second": 199.989,
      "eval_steps_per_second": 6.257,
      "step": 4060
    },
    {
      "epoch": 9.98,
      "step": 4060,
      "total_flos": 1.3140619208067262e+19,
      "train_loss": 0.6519044913681857,
      "train_runtime": 4257.1599,
      "train_samples_per_second": 122.29,
      "train_steps_per_second": 0.954
    }
  ],
  "logging_steps": 10,
  "max_steps": 4060,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 1.3140619208067262e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}