{
  "best_metric": 0.8761682242990654,
  "best_model_checkpoint": "vit-base-patch16-224-in21k-finetuned-cassava/checkpoint-4280",
  "epoch": 10.0,
  "global_step": 5350,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 9.345794392523364e-07,
      "loss": 1.6171,
      "step": 10
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.8691588785046728e-06,
      "loss": 1.6057,
      "step": 20
    },
    {
      "epoch": 0.06,
      "learning_rate": 2.8037383177570094e-06,
      "loss": 1.5864,
      "step": 30
    },
    {
      "epoch": 0.07,
      "learning_rate": 3.7383177570093455e-06,
      "loss": 1.5484,
      "step": 40
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.6728971962616825e-06,
      "loss": 1.4915,
      "step": 50
    },
    {
      "epoch": 0.11,
      "learning_rate": 5.607476635514019e-06,
      "loss": 1.4295,
      "step": 60
    },
    {
      "epoch": 0.13,
      "learning_rate": 6.542056074766355e-06,
      "loss": 1.306,
      "step": 70
    },
    {
      "epoch": 0.15,
      "learning_rate": 7.476635514018691e-06,
      "loss": 1.2262,
      "step": 80
    },
    {
      "epoch": 0.17,
      "learning_rate": 8.411214953271028e-06,
      "loss": 1.1393,
      "step": 90
    },
    {
      "epoch": 0.19,
      "learning_rate": 9.345794392523365e-06,
      "loss": 1.0983,
      "step": 100
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.02803738317757e-05,
      "loss": 1.1016,
      "step": 110
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.1214953271028037e-05,
      "loss": 1.0129,
      "step": 120
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.2149532710280374e-05,
      "loss": 1.0331,
      "step": 130
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.308411214953271e-05,
      "loss": 0.9059,
      "step": 140
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.4018691588785047e-05,
      "loss": 0.9411,
      "step": 150
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.4953271028037382e-05,
      "loss": 0.8249,
      "step": 160
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.588785046728972e-05,
      "loss": 0.8729,
      "step": 170
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.6822429906542056e-05,
      "loss": 0.8896,
      "step": 180
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.775700934579439e-05,
      "loss": 0.8313,
      "step": 190
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.869158878504673e-05,
      "loss": 0.7836,
      "step": 200
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9626168224299065e-05,
      "loss": 0.7587,
      "step": 210
    },
    {
      "epoch": 0.41,
      "learning_rate": 2.05607476635514e-05,
      "loss": 0.6824,
      "step": 220
    },
    {
      "epoch": 0.43,
      "learning_rate": 2.149532710280374e-05,
      "loss": 0.7291,
      "step": 230
    },
    {
      "epoch": 0.45,
      "learning_rate": 2.2429906542056075e-05,
      "loss": 0.7577,
      "step": 240
    },
    {
      "epoch": 0.47,
      "learning_rate": 2.3364485981308414e-05,
      "loss": 0.7538,
      "step": 250
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.429906542056075e-05,
      "loss": 0.6234,
      "step": 260
    },
    {
      "epoch": 0.5,
      "learning_rate": 2.5233644859813084e-05,
      "loss": 0.6444,
      "step": 270
    },
    {
      "epoch": 0.52,
      "learning_rate": 2.616822429906542e-05,
      "loss": 0.6348,
      "step": 280
    },
    {
      "epoch": 0.54,
      "learning_rate": 2.7102803738317755e-05,
      "loss": 0.6086,
      "step": 290
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.8037383177570094e-05,
      "loss": 0.6258,
      "step": 300
    },
    {
      "epoch": 0.58,
      "learning_rate": 2.897196261682243e-05,
      "loss": 0.6055,
      "step": 310
    },
    {
      "epoch": 0.6,
      "learning_rate": 2.9906542056074764e-05,
      "loss": 0.5749,
      "step": 320
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.08411214953271e-05,
      "loss": 0.595,
      "step": 330
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.177570093457944e-05,
      "loss": 0.541,
      "step": 340
    },
    {
      "epoch": 0.65,
      "learning_rate": 3.2710280373831774e-05,
      "loss": 0.597,
      "step": 350
    },
    {
      "epoch": 0.67,
      "learning_rate": 3.364485981308411e-05,
      "loss": 0.465,
      "step": 360
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.457943925233645e-05,
      "loss": 0.5728,
      "step": 370
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.551401869158878e-05,
      "loss": 0.5068,
      "step": 380
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.644859813084112e-05,
      "loss": 0.5928,
      "step": 390
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.738317757009346e-05,
      "loss": 0.5488,
      "step": 400
    },
    {
      "epoch": 0.77,
      "learning_rate": 3.831775700934579e-05,
      "loss": 0.5565,
      "step": 410
    },
    {
      "epoch": 0.79,
      "learning_rate": 3.925233644859813e-05,
      "loss": 0.5147,
      "step": 420
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.018691588785047e-05,
      "loss": 0.5709,
      "step": 430
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.11214953271028e-05,
      "loss": 0.5086,
      "step": 440
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.205607476635514e-05,
      "loss": 0.4941,
      "step": 450
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.299065420560748e-05,
      "loss": 0.492,
      "step": 460
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.392523364485982e-05,
      "loss": 0.522,
      "step": 470
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.485981308411215e-05,
      "loss": 0.5502,
      "step": 480
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.579439252336449e-05,
      "loss": 0.5002,
      "step": 490
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.672897196261683e-05,
      "loss": 0.5522,
      "step": 500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.766355140186916e-05,
      "loss": 0.5166,
      "step": 510
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.85981308411215e-05,
      "loss": 0.5825,
      "step": 520
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.9532710280373836e-05,
      "loss": 0.5531,
      "step": 530
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.8336448598130841,
      "eval_loss": 0.4937918782234192,
      "eval_runtime": 96.0141,
      "eval_samples_per_second": 44.577,
      "eval_steps_per_second": 5.572,
      "step": 535
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.994807892004154e-05,
      "loss": 0.491,
      "step": 540
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.9844236760124614e-05,
      "loss": 0.4679,
      "step": 550
    },
    {
      "epoch": 1.05,
      "learning_rate": 4.974039460020769e-05,
      "loss": 0.4584,
      "step": 560
    },
    {
      "epoch": 1.07,
      "learning_rate": 4.963655244029076e-05,
      "loss": 0.5556,
      "step": 570
    },
    {
      "epoch": 1.08,
      "learning_rate": 4.9532710280373836e-05,
      "loss": 0.4374,
      "step": 580
    },
    {
      "epoch": 1.1,
      "learning_rate": 4.9428868120456904e-05,
      "loss": 0.4499,
      "step": 590
    },
    {
      "epoch": 1.12,
      "learning_rate": 4.9325025960539985e-05,
      "loss": 0.5201,
      "step": 600
    },
    {
      "epoch": 1.14,
      "learning_rate": 4.922118380062305e-05,
      "loss": 0.6573,
      "step": 610
    },
    {
      "epoch": 1.16,
      "learning_rate": 4.9117341640706127e-05,
      "loss": 0.5029,
      "step": 620
    },
    {
      "epoch": 1.18,
      "learning_rate": 4.901349948078921e-05,
      "loss": 0.5457,
      "step": 630
    },
    {
      "epoch": 1.2,
      "learning_rate": 4.8909657320872275e-05,
      "loss": 0.492,
      "step": 640
    },
    {
      "epoch": 1.21,
      "learning_rate": 4.880581516095535e-05,
      "loss": 0.3407,
      "step": 650
    },
    {
      "epoch": 1.23,
      "learning_rate": 4.8701973001038423e-05,
      "loss": 0.5129,
      "step": 660
    },
    {
      "epoch": 1.25,
      "learning_rate": 4.85981308411215e-05,
      "loss": 0.4626,
      "step": 670
    },
    {
      "epoch": 1.27,
      "learning_rate": 4.849428868120457e-05,
      "loss": 0.5528,
      "step": 680
    },
    {
      "epoch": 1.29,
      "learning_rate": 4.8390446521287646e-05,
      "loss": 0.4977,
      "step": 690
    },
    {
      "epoch": 1.31,
      "learning_rate": 4.828660436137072e-05,
      "loss": 0.387,
      "step": 700
    },
    {
      "epoch": 1.33,
      "learning_rate": 4.818276220145379e-05,
      "loss": 0.5349,
      "step": 710
    },
    {
      "epoch": 1.35,
      "learning_rate": 4.807892004153687e-05,
      "loss": 0.4265,
      "step": 720
    },
    {
      "epoch": 1.36,
      "learning_rate": 4.797507788161994e-05,
      "loss": 0.4719,
      "step": 730
    },
    {
      "epoch": 1.38,
      "learning_rate": 4.787123572170301e-05,
      "loss": 0.5005,
      "step": 740
    },
    {
      "epoch": 1.4,
      "learning_rate": 4.776739356178609e-05,
      "loss": 0.5112,
      "step": 750
    },
    {
      "epoch": 1.42,
      "learning_rate": 4.766355140186916e-05,
      "loss": 0.5112,
      "step": 760
    },
    {
      "epoch": 1.44,
      "learning_rate": 4.755970924195223e-05,
      "loss": 0.3827,
      "step": 770
    },
    {
      "epoch": 1.46,
      "learning_rate": 4.745586708203531e-05,
      "loss": 0.5576,
      "step": 780
    },
    {
      "epoch": 1.48,
      "learning_rate": 4.735202492211838e-05,
      "loss": 0.3974,
      "step": 790
    },
    {
      "epoch": 1.5,
      "learning_rate": 4.7248182762201456e-05,
      "loss": 0.5209,
      "step": 800
    },
    {
      "epoch": 1.51,
      "learning_rate": 4.714434060228453e-05,
      "loss": 0.4721,
      "step": 810
    },
    {
      "epoch": 1.53,
      "learning_rate": 4.7040498442367604e-05,
      "loss": 0.385,
      "step": 820
    },
    {
      "epoch": 1.55,
      "learning_rate": 4.693665628245067e-05,
      "loss": 0.4874,
      "step": 830
    },
    {
      "epoch": 1.57,
      "learning_rate": 4.683281412253375e-05,
      "loss": 0.4409,
      "step": 840
    },
    {
      "epoch": 1.59,
      "learning_rate": 4.672897196261683e-05,
      "loss": 0.506,
      "step": 850
    },
    {
      "epoch": 1.61,
      "learning_rate": 4.6625129802699895e-05,
      "loss": 0.4446,
      "step": 860
    },
    {
      "epoch": 1.63,
      "learning_rate": 4.6521287642782976e-05,
      "loss": 0.4086,
      "step": 870
    },
    {
      "epoch": 1.64,
      "learning_rate": 4.641744548286604e-05,
      "loss": 0.3869,
      "step": 880
    },
    {
      "epoch": 1.66,
      "learning_rate": 4.631360332294912e-05,
      "loss": 0.395,
      "step": 890
    },
    {
      "epoch": 1.68,
      "learning_rate": 4.620976116303219e-05,
      "loss": 0.464,
      "step": 900
    },
    {
      "epoch": 1.7,
      "learning_rate": 4.6105919003115266e-05,
      "loss": 0.3581,
      "step": 910
    },
    {
      "epoch": 1.72,
      "learning_rate": 4.600207684319834e-05,
      "loss": 0.387,
      "step": 920
    },
    {
      "epoch": 1.74,
      "learning_rate": 4.5898234683281414e-05,
      "loss": 0.4066,
      "step": 930
    },
    {
      "epoch": 1.76,
      "learning_rate": 4.579439252336449e-05,
      "loss": 0.3997,
      "step": 940
    },
    {
      "epoch": 1.78,
      "learning_rate": 4.569055036344756e-05,
      "loss": 0.4705,
      "step": 950
    },
    {
      "epoch": 1.79,
      "learning_rate": 4.558670820353064e-05,
      "loss": 0.4063,
      "step": 960
    },
    {
      "epoch": 1.81,
      "learning_rate": 4.548286604361371e-05,
      "loss": 0.4227,
      "step": 970
    },
    {
      "epoch": 1.83,
      "learning_rate": 4.537902388369678e-05,
      "loss": 0.4844,
      "step": 980
    },
    {
      "epoch": 1.85,
      "learning_rate": 4.527518172377986e-05,
      "loss": 0.4631,
      "step": 990
    },
    {
      "epoch": 1.87,
      "learning_rate": 4.517133956386293e-05,
      "loss": 0.4847,
      "step": 1000
    },
    {
      "epoch": 1.89,
      "learning_rate": 4.5067497403946e-05,
      "loss": 0.4112,
      "step": 1010
    },
    {
      "epoch": 1.91,
      "learning_rate": 4.496365524402908e-05,
      "loss": 0.3964,
      "step": 1020
    },
    {
      "epoch": 1.93,
      "learning_rate": 4.485981308411215e-05,
      "loss": 0.4341,
      "step": 1030
    },
    {
      "epoch": 1.94,
      "learning_rate": 4.4755970924195224e-05,
      "loss": 0.5106,
      "step": 1040
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.46521287642783e-05,
      "loss": 0.4269,
      "step": 1050
    },
    {
      "epoch": 1.98,
      "learning_rate": 4.454828660436137e-05,
      "loss": 0.413,
      "step": 1060
    },
    {
      "epoch": 2.0,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.4139,
      "step": 1070
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.861214953271028,
      "eval_loss": 0.40707165002822876,
      "eval_runtime": 99.8719,
      "eval_samples_per_second": 42.855,
      "eval_steps_per_second": 5.357,
      "step": 1070
    },
    {
      "epoch": 2.02,
      "learning_rate": 4.434060228452752e-05,
      "loss": 0.395,
      "step": 1080
    },
    {
      "epoch": 2.04,
      "learning_rate": 4.4236760124610595e-05,
      "loss": 0.3672,
      "step": 1090
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.413291796469366e-05,
      "loss": 0.3994,
      "step": 1100
    },
    {
      "epoch": 2.07,
      "learning_rate": 4.4029075804776743e-05,
      "loss": 0.3818,
      "step": 1110
    },
    {
      "epoch": 2.09,
      "learning_rate": 4.392523364485982e-05,
      "loss": 0.4013,
      "step": 1120
    },
    {
      "epoch": 2.11,
      "learning_rate": 4.3821391484942885e-05,
      "loss": 0.3409,
      "step": 1130
    },
    {
      "epoch": 2.13,
      "learning_rate": 4.3717549325025966e-05,
      "loss": 0.4753,
      "step": 1140
    },
    {
      "epoch": 2.15,
      "learning_rate": 4.3613707165109034e-05,
      "loss": 0.3671,
      "step": 1150
    },
    {
      "epoch": 2.17,
      "learning_rate": 4.350986500519211e-05,
      "loss": 0.462,
      "step": 1160
    },
    {
      "epoch": 2.19,
      "learning_rate": 4.340602284527518e-05,
      "loss": 0.3729,
      "step": 1170
    },
    {
      "epoch": 2.21,
      "learning_rate": 4.3302180685358256e-05,
      "loss": 0.3876,
      "step": 1180
    },
    {
      "epoch": 2.22,
      "learning_rate": 4.319833852544133e-05,
      "loss": 0.3794,
      "step": 1190
    },
    {
      "epoch": 2.24,
      "learning_rate": 4.3094496365524405e-05,
      "loss": 0.3609,
      "step": 1200
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.299065420560748e-05,
      "loss": 0.4161,
      "step": 1210
    },
    {
      "epoch": 2.28,
      "learning_rate": 4.2886812045690546e-05,
      "loss": 0.5202,
      "step": 1220
    },
    {
      "epoch": 2.3,
      "learning_rate": 4.278296988577363e-05,
      "loss": 0.4579,
      "step": 1230
    },
    {
      "epoch": 2.32,
      "learning_rate": 4.26791277258567e-05,
      "loss": 0.408,
      "step": 1240
    },
    {
      "epoch": 2.34,
      "learning_rate": 4.257528556593977e-05,
      "loss": 0.36,
      "step": 1250
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.247144340602285e-05,
      "loss": 0.3109,
      "step": 1260
    },
    {
      "epoch": 2.37,
      "learning_rate": 4.236760124610592e-05,
      "loss": 0.3713,
      "step": 1270
    },
    {
      "epoch": 2.39,
      "learning_rate": 4.226375908618899e-05,
      "loss": 0.4329,
      "step": 1280
    },
    {
      "epoch": 2.41,
      "learning_rate": 4.2159916926272066e-05,
      "loss": 0.3612,
      "step": 1290
    },
    {
      "epoch": 2.43,
      "learning_rate": 4.205607476635514e-05,
      "loss": 0.387,
      "step": 1300
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.1952232606438215e-05,
      "loss": 0.4461,
      "step": 1310
    },
    {
      "epoch": 2.47,
      "learning_rate": 4.184839044652129e-05,
      "loss": 0.2903,
      "step": 1320
    },
    {
      "epoch": 2.49,
      "learning_rate": 4.174454828660436e-05,
      "loss": 0.328,
      "step": 1330
    },
    {
      "epoch": 2.5,
      "learning_rate": 4.164070612668744e-05,
      "loss": 0.4151,
      "step": 1340
    },
    {
      "epoch": 2.52,
      "learning_rate": 4.153686396677051e-05,
      "loss": 0.3888,
      "step": 1350
    },
    {
      "epoch": 2.54,
      "learning_rate": 4.1433021806853586e-05,
      "loss": 0.3871,
      "step": 1360
    },
    {
      "epoch": 2.56,
      "learning_rate": 4.132917964693666e-05,
      "loss": 0.4304,
      "step": 1370
    },
    {
      "epoch": 2.58,
      "learning_rate": 4.1225337487019734e-05,
      "loss": 0.387,
      "step": 1380
    },
    {
      "epoch": 2.6,
      "learning_rate": 4.11214953271028e-05,
      "loss": 0.3658,
      "step": 1390
    },
    {
      "epoch": 2.62,
      "learning_rate": 4.101765316718588e-05,
      "loss": 0.4494,
      "step": 1400
    },
    {
      "epoch": 2.64,
      "learning_rate": 4.091381100726896e-05,
      "loss": 0.3355,
      "step": 1410
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.0809968847352024e-05,
      "loss": 0.3368,
      "step": 1420
    },
    {
      "epoch": 2.67,
      "learning_rate": 4.0706126687435105e-05,
      "loss": 0.4087,
      "step": 1430
    },
    {
      "epoch": 2.69,
      "learning_rate": 4.060228452751817e-05,
      "loss": 0.4279,
      "step": 1440
    },
    {
      "epoch": 2.71,
      "learning_rate": 4.049844236760125e-05,
      "loss": 0.3548,
      "step": 1450
    },
    {
      "epoch": 2.73,
      "learning_rate": 4.039460020768432e-05,
      "loss": 0.4466,
      "step": 1460
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.0290758047767395e-05,
      "loss": 0.4133,
      "step": 1470
    },
    {
      "epoch": 2.77,
      "learning_rate": 4.018691588785047e-05,
      "loss": 0.3845,
      "step": 1480
    },
    {
      "epoch": 2.79,
      "learning_rate": 4.0083073727933544e-05,
      "loss": 0.4471,
      "step": 1490
    },
    {
      "epoch": 2.8,
      "learning_rate": 3.997923156801662e-05,
      "loss": 0.4421,
      "step": 1500
    },
    {
      "epoch": 2.82,
      "learning_rate": 3.987538940809969e-05,
      "loss": 0.4383,
      "step": 1510
    },
    {
      "epoch": 2.84,
      "learning_rate": 3.9771547248182767e-05,
      "loss": 0.3326,
      "step": 1520
    },
    {
      "epoch": 2.86,
      "learning_rate": 3.966770508826584e-05,
      "loss": 0.4058,
      "step": 1530
    },
    {
      "epoch": 2.88,
      "learning_rate": 3.956386292834891e-05,
      "loss": 0.3653,
      "step": 1540
    },
    {
      "epoch": 2.9,
      "learning_rate": 3.946002076843199e-05,
      "loss": 0.4157,
      "step": 1550
    },
    {
      "epoch": 2.92,
      "learning_rate": 3.935617860851506e-05,
      "loss": 0.3944,
      "step": 1560
    },
    {
      "epoch": 2.93,
      "learning_rate": 3.925233644859813e-05,
      "loss": 0.4103,
      "step": 1570
    },
    {
      "epoch": 2.95,
      "learning_rate": 3.914849428868121e-05,
      "loss": 0.4601,
      "step": 1580
    },
    {
      "epoch": 2.97,
      "learning_rate": 3.904465212876428e-05,
      "loss": 0.3561,
      "step": 1590
    },
    {
      "epoch": 2.99,
      "learning_rate": 3.8940809968847354e-05,
      "loss": 0.287,
      "step": 1600
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8642523364485981,
      "eval_loss": 0.39544418454170227,
      "eval_runtime": 95.8501,
      "eval_samples_per_second": 44.653,
      "eval_steps_per_second": 5.582,
      "step": 1605
    },
    {
      "epoch": 3.01,
      "learning_rate": 3.883696780893043e-05,
      "loss": 0.3781,
      "step": 1610
    },
    {
      "epoch": 3.03,
      "learning_rate": 3.87331256490135e-05,
      "loss": 0.3999,
      "step": 1620
    },
    {
      "epoch": 3.05,
      "learning_rate": 3.8629283489096576e-05,
      "loss": 0.3507,
      "step": 1630
    },
    {
      "epoch": 3.07,
      "learning_rate": 3.852544132917965e-05,
      "loss": 0.354,
      "step": 1640
    },
    {
      "epoch": 3.08,
      "learning_rate": 3.8421599169262725e-05,
      "loss": 0.4233,
      "step": 1650
    },
    {
      "epoch": 3.1,
      "learning_rate": 3.831775700934579e-05,
      "loss": 0.3766,
      "step": 1660
    },
    {
      "epoch": 3.12,
      "learning_rate": 3.821391484942887e-05,
      "loss": 0.4494,
      "step": 1670
    },
    {
      "epoch": 3.14,
      "learning_rate": 3.811007268951195e-05,
      "loss": 0.3772,
      "step": 1680
    },
    {
      "epoch": 3.16,
      "learning_rate": 3.8006230529595015e-05,
      "loss": 0.4079,
      "step": 1690
    },
    {
      "epoch": 3.18,
      "learning_rate": 3.7902388369678096e-05,
      "loss": 0.3796,
      "step": 1700
    },
    {
      "epoch": 3.2,
      "learning_rate": 3.779854620976116e-05,
      "loss": 0.2899,
      "step": 1710
    },
    {
      "epoch": 3.21,
      "learning_rate": 3.769470404984424e-05,
      "loss": 0.357,
      "step": 1720
    },
    {
      "epoch": 3.23,
      "learning_rate": 3.759086188992731e-05,
      "loss": 0.3922,
      "step": 1730
    },
    {
      "epoch": 3.25,
      "learning_rate": 3.7487019730010386e-05,
      "loss": 0.3743,
      "step": 1740
    },
    {
      "epoch": 3.27,
      "learning_rate": 3.738317757009346e-05,
      "loss": 0.3275,
      "step": 1750
    },
    {
      "epoch": 3.29,
      "learning_rate": 3.7279335410176535e-05,
      "loss": 0.3128,
      "step": 1760
    },
    {
      "epoch": 3.31,
      "learning_rate": 3.717549325025961e-05,
      "loss": 0.3351,
      "step": 1770
    },
    {
      "epoch": 3.33,
      "learning_rate": 3.7071651090342676e-05,
      "loss": 0.354,
      "step": 1780
    },
    {
      "epoch": 3.35,
      "learning_rate": 3.696780893042576e-05,
      "loss": 0.3604,
      "step": 1790
    },
    {
      "epoch": 3.36,
      "learning_rate": 3.686396677050883e-05,
      "loss": 0.389,
      "step": 1800
    },
    {
      "epoch": 3.38,
      "learning_rate": 3.67601246105919e-05,
      "loss": 0.3892,
      "step": 1810
    },
    {
      "epoch": 3.4,
      "learning_rate": 3.665628245067498e-05,
      "loss": 0.3525,
      "step": 1820
    },
    {
      "epoch": 3.42,
      "learning_rate": 3.655244029075805e-05,
      "loss": 0.3725,
      "step": 1830
    },
    {
      "epoch": 3.44,
      "learning_rate": 3.644859813084112e-05,
      "loss": 0.3909,
      "step": 1840
    },
    {
      "epoch": 3.46,
      "learning_rate": 3.6344755970924196e-05,
      "loss": 0.2884,
      "step": 1850
    },
    {
      "epoch": 3.48,
      "learning_rate": 3.624091381100727e-05,
      "loss": 0.3384,
      "step": 1860
    },
    {
      "epoch": 3.5,
      "learning_rate": 3.6137071651090344e-05,
      "loss": 0.3645,
      "step": 1870
    },
    {
      "epoch": 3.51,
      "learning_rate": 3.603322949117342e-05,
      "loss": 0.3407,
      "step": 1880
    },
    {
      "epoch": 3.53,
      "learning_rate": 3.592938733125649e-05,
      "loss": 0.3847,
      "step": 1890
    },
    {
      "epoch": 3.55,
      "learning_rate": 3.582554517133957e-05,
      "loss": 0.3862,
      "step": 1900
    },
    {
      "epoch": 3.57,
      "learning_rate": 3.572170301142264e-05,
      "loss": 0.3831,
      "step": 1910
    },
    {
      "epoch": 3.59,
      "learning_rate": 3.5617860851505715e-05,
      "loss": 0.3147,
      "step": 1920
    },
    {
      "epoch": 3.61,
      "learning_rate": 3.551401869158878e-05,
      "loss": 0.3686,
      "step": 1930
    },
    {
      "epoch": 3.63,
      "learning_rate": 3.5410176531671864e-05,
      "loss": 0.3913,
      "step": 1940
    },
    {
      "epoch": 3.64,
      "learning_rate": 3.530633437175493e-05,
      "loss": 0.3717,
      "step": 1950
    },
    {
      "epoch": 3.66,
      "learning_rate": 3.5202492211838006e-05,
      "loss": 0.4135,
      "step": 1960
    },
    {
      "epoch": 3.68,
      "learning_rate": 3.5098650051921087e-05,
      "loss": 0.372,
      "step": 1970
    },
    {
      "epoch": 3.7,
      "learning_rate": 3.4994807892004154e-05,
      "loss": 0.3738,
      "step": 1980
    },
    {
      "epoch": 3.72,
      "learning_rate": 3.489096573208723e-05,
      "loss": 0.3847,
      "step": 1990
    },
    {
      "epoch": 3.74,
      "learning_rate": 3.47871235721703e-05,
      "loss": 0.3893,
      "step": 2000
    },
    {
      "epoch": 3.76,
      "learning_rate": 3.468328141225338e-05,
      "loss": 0.3045,
      "step": 2010
    },
    {
      "epoch": 3.78,
      "learning_rate": 3.457943925233645e-05,
      "loss": 0.3242,
      "step": 2020
    },
    {
      "epoch": 3.79,
      "learning_rate": 3.4475597092419525e-05,
      "loss": 0.3417,
      "step": 2030
    },
    {
      "epoch": 3.81,
      "learning_rate": 3.43717549325026e-05,
      "loss": 0.3568,
      "step": 2040
    },
    {
      "epoch": 3.83,
      "learning_rate": 3.426791277258567e-05,
      "loss": 0.3225,
      "step": 2050
    },
    {
      "epoch": 3.85,
      "learning_rate": 3.416407061266875e-05,
      "loss": 0.3341,
      "step": 2060
    },
    {
      "epoch": 3.87,
      "learning_rate": 3.406022845275182e-05,
      "loss": 0.326,
      "step": 2070
    },
    {
      "epoch": 3.89,
      "learning_rate": 3.395638629283489e-05,
      "loss": 0.3508,
      "step": 2080
    },
    {
      "epoch": 3.91,
      "learning_rate": 3.385254413291797e-05,
      "loss": 0.333,
      "step": 2090
    },
    {
      "epoch": 3.93,
      "learning_rate": 3.374870197300104e-05,
      "loss": 0.4373,
      "step": 2100
    },
    {
      "epoch": 3.94,
      "learning_rate": 3.364485981308411e-05,
      "loss": 0.3769,
      "step": 2110
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.3541017653167186e-05,
      "loss": 0.3697,
      "step": 2120
    },
    {
      "epoch": 3.98,
      "learning_rate": 3.343717549325026e-05,
      "loss": 0.3187,
      "step": 2130
    },
    {
      "epoch": 4.0,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.4211,
      "step": 2140
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.8700934579439252,
      "eval_loss": 0.39060959219932556,
      "eval_runtime": 93.1195,
      "eval_samples_per_second": 45.962,
      "eval_steps_per_second": 5.745,
      "step": 2140
    },
    {
      "epoch": 4.02,
      "learning_rate": 3.322949117341641e-05,
      "loss": 0.2857,
      "step": 2150
    },
    {
      "epoch": 4.04,
      "learning_rate": 3.3125649013499483e-05,
      "loss": 0.3271,
      "step": 2160
    },
    {
      "epoch": 4.06,
      "learning_rate": 3.302180685358255e-05,
      "loss": 0.3109,
      "step": 2170
    },
    {
      "epoch": 4.07,
      "learning_rate": 3.291796469366563e-05,
      "loss": 0.2679,
      "step": 2180
    },
    {
      "epoch": 4.09,
      "learning_rate": 3.2814122533748706e-05,
      "loss": 0.3242,
      "step": 2190
    },
    {
      "epoch": 4.11,
      "learning_rate": 3.2710280373831774e-05,
      "loss": 0.3083,
      "step": 2200
    },
    {
      "epoch": 4.13,
      "learning_rate": 3.2606438213914855e-05,
      "loss": 0.3325,
      "step": 2210
    },
    {
      "epoch": 4.15,
      "learning_rate": 3.250259605399792e-05,
      "loss": 0.3989,
      "step": 2220
    },
    {
      "epoch": 4.17,
      "learning_rate": 3.2398753894080996e-05,
      "loss": 0.3044,
      "step": 2230
    },
    {
      "epoch": 4.19,
      "learning_rate": 3.229491173416408e-05,
      "loss": 0.3389,
      "step": 2240
    },
    {
      "epoch": 4.21,
      "learning_rate": 3.2191069574247145e-05,
      "loss": 0.3284,
      "step": 2250
    },
    {
      "epoch": 4.22,
      "learning_rate": 3.208722741433022e-05,
      "loss": 0.2777,
      "step": 2260
    },
    {
      "epoch": 4.24,
      "learning_rate": 3.198338525441329e-05,
      "loss": 0.3531,
      "step": 2270
    },
    {
      "epoch": 4.26,
      "learning_rate": 3.187954309449637e-05,
      "loss": 0.3578,
      "step": 2280
    },
    {
      "epoch": 4.28,
      "learning_rate": 3.177570093457944e-05,
      "loss": 0.375,
      "step": 2290
    },
    {
      "epoch": 4.3,
      "learning_rate": 3.1671858774662516e-05,
      "loss": 0.3912,
      "step": 2300
    },
    {
      "epoch": 4.32,
      "learning_rate": 3.156801661474559e-05,
      "loss": 0.3632,
      "step": 2310
    },
    {
      "epoch": 4.34,
      "learning_rate": 3.146417445482866e-05,
      "loss": 0.3453,
      "step": 2320
    },
    {
      "epoch": 4.36,
      "learning_rate": 3.136033229491174e-05,
      "loss": 0.3412,
      "step": 2330
    },
    {
      "epoch": 4.37,
      "learning_rate": 3.1256490134994806e-05,
      "loss": 0.3506,
      "step": 2340
    },
    {
      "epoch": 4.39,
      "learning_rate": 3.115264797507788e-05,
      "loss": 0.3178,
      "step": 2350
    },
    {
      "epoch": 4.41,
      "learning_rate": 3.104880581516096e-05,
      "loss": 0.3787,
      "step": 2360
    },
    {
      "epoch": 4.43,
      "learning_rate": 3.094496365524403e-05,
      "loss": 0.2522,
      "step": 2370
    },
    {
      "epoch": 4.45,
      "learning_rate": 3.08411214953271e-05,
      "loss": 0.2986,
      "step": 2380
    },
    {
      "epoch": 4.47,
      "learning_rate": 3.073727933541018e-05,
      "loss": 0.3647,
      "step": 2390
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.063343717549325e-05,
      "loss": 0.2927,
      "step": 2400
    },
    {
      "epoch": 4.5,
      "learning_rate": 3.0529595015576326e-05,
      "loss": 0.3868,
      "step": 2410
    },
    {
      "epoch": 4.52,
      "learning_rate": 3.04257528556594e-05,
      "loss": 0.2927,
      "step": 2420
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.0321910695742474e-05,
      "loss": 0.2755,
      "step": 2430
    },
    {
      "epoch": 4.56,
      "learning_rate": 3.0218068535825545e-05,
      "loss": 0.2953,
      "step": 2440
    },
    {
      "epoch": 4.58,
      "learning_rate": 3.0114226375908622e-05,
      "loss": 0.2946,
      "step": 2450
    },
    {
      "epoch": 4.6,
      "learning_rate": 3.0010384215991693e-05,
      "loss": 0.3302,
      "step": 2460
    },
    {
      "epoch": 4.62,
      "learning_rate": 2.9906542056074764e-05,
      "loss": 0.406,
      "step": 2470
    },
    {
      "epoch": 4.64,
      "learning_rate": 2.9802699896157842e-05,
      "loss": 0.323,
      "step": 2480
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.9698857736240916e-05,
      "loss": 0.3808,
      "step": 2490
    },
    {
      "epoch": 4.67,
      "learning_rate": 2.9595015576323987e-05,
      "loss": 0.3269,
      "step": 2500
    },
    {
      "epoch": 4.69,
      "learning_rate": 2.9491173416407064e-05,
      "loss": 0.3703,
      "step": 2510
    },
    {
      "epoch": 4.71,
      "learning_rate": 2.9387331256490135e-05,
      "loss": 0.3173,
      "step": 2520
    },
    {
      "epoch": 4.73,
      "learning_rate": 2.9283489096573206e-05,
      "loss": 0.2749,
      "step": 2530
    },
    {
      "epoch": 4.75,
      "learning_rate": 2.9179646936656284e-05,
      "loss": 0.3128,
      "step": 2540
    },
    {
      "epoch": 4.77,
      "learning_rate": 2.9075804776739358e-05,
      "loss": 0.3725,
      "step": 2550
    },
    {
      "epoch": 4.79,
      "learning_rate": 2.897196261682243e-05,
      "loss": 0.3052,
      "step": 2560
    },
    {
      "epoch": 4.8,
      "learning_rate": 2.8868120456905506e-05,
      "loss": 0.3159,
      "step": 2570
    },
    {
      "epoch": 4.82,
      "learning_rate": 2.8764278296988577e-05,
      "loss": 0.27,
      "step": 2580
    },
    {
      "epoch": 4.84,
      "learning_rate": 2.866043613707165e-05,
      "loss": 0.3566,
      "step": 2590
    },
    {
      "epoch": 4.86,
      "learning_rate": 2.855659397715473e-05,
      "loss": 0.3622,
      "step": 2600
    },
    {
      "epoch": 4.88,
      "learning_rate": 2.84527518172378e-05,
      "loss": 0.3318,
      "step": 2610
    },
    {
      "epoch": 4.9,
      "learning_rate": 2.834890965732087e-05,
      "loss": 0.334,
      "step": 2620
    },
    {
      "epoch": 4.92,
      "learning_rate": 2.824506749740395e-05,
      "loss": 0.3084,
      "step": 2630
    },
    {
      "epoch": 4.93,
      "learning_rate": 2.814122533748702e-05,
      "loss": 0.3524,
      "step": 2640
    },
    {
      "epoch": 4.95,
      "learning_rate": 2.8037383177570094e-05,
      "loss": 0.3473,
      "step": 2650
    },
    {
      "epoch": 4.97,
      "learning_rate": 2.793354101765317e-05,
      "loss": 0.3593,
      "step": 2660
    },
    {
      "epoch": 4.99,
      "learning_rate": 2.7829698857736242e-05,
      "loss": 0.316,
      "step": 2670
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8754672897196262,
      "eval_loss": 0.37160804867744446,
      "eval_runtime": 95.1891,
      "eval_samples_per_second": 44.963,
      "eval_steps_per_second": 5.62,
      "step": 2675
    },
    {
      "epoch": 5.01,
      "learning_rate": 2.7725856697819313e-05,
      "loss": 0.2738,
      "step": 2680
    },
    {
      "epoch": 5.03,
      "learning_rate": 2.762201453790239e-05,
      "loss": 0.342,
      "step": 2690
    },
    {
      "epoch": 5.05,
      "learning_rate": 2.751817237798546e-05,
      "loss": 0.3448,
      "step": 2700
    },
    {
      "epoch": 5.07,
      "learning_rate": 2.7414330218068536e-05,
      "loss": 0.3099,
      "step": 2710
    },
    {
      "epoch": 5.08,
      "learning_rate": 2.7310488058151613e-05,
      "loss": 0.3368,
      "step": 2720
    },
    {
      "epoch": 5.1,
      "learning_rate": 2.7206645898234684e-05,
      "loss": 0.2743,
      "step": 2730
    },
    {
      "epoch": 5.12,
      "learning_rate": 2.7102803738317755e-05,
      "loss": 0.3064,
      "step": 2740
    },
    {
      "epoch": 5.14,
      "learning_rate": 2.6998961578400832e-05,
      "loss": 0.2109,
      "step": 2750
    },
    {
      "epoch": 5.16,
      "learning_rate": 2.6895119418483907e-05,
      "loss": 0.3611,
      "step": 2760
    },
    {
      "epoch": 5.18,
      "learning_rate": 2.6791277258566978e-05,
      "loss": 0.3456,
      "step": 2770
    },
    {
      "epoch": 5.2,
      "learning_rate": 2.6687435098650055e-05,
      "loss": 0.2599,
      "step": 2780
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.6583592938733126e-05,
      "loss": 0.2951,
      "step": 2790
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.6479750778816197e-05,
      "loss": 0.1794,
      "step": 2800
    },
    {
      "epoch": 5.25,
      "learning_rate": 2.6375908618899274e-05,
      "loss": 0.2954,
      "step": 2810
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.627206645898235e-05,
      "loss": 0.298,
      "step": 2820
    },
    {
      "epoch": 5.29,
      "learning_rate": 2.616822429906542e-05,
      "loss": 0.3187,
      "step": 2830
    },
    {
      "epoch": 5.31,
      "learning_rate": 2.6064382139148497e-05,
      "loss": 0.3297,
      "step": 2840
    },
    {
      "epoch": 5.33,
      "learning_rate": 2.5960539979231568e-05,
      "loss": 0.2431,
      "step": 2850
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.585669781931464e-05,
      "loss": 0.3967,
      "step": 2860
    },
    {
      "epoch": 5.36,
      "learning_rate": 2.5752855659397716e-05,
      "loss": 0.3722,
      "step": 2870
    },
    {
      "epoch": 5.38,
      "learning_rate": 2.564901349948079e-05,
      "loss": 0.2752,
      "step": 2880
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.554517133956386e-05,
      "loss": 0.2971,
      "step": 2890
    },
    {
      "epoch": 5.42,
      "learning_rate": 2.544132917964694e-05,
      "loss": 0.2804,
      "step": 2900
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.533748701973001e-05,
      "loss": 0.3715,
      "step": 2910
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.5233644859813084e-05,
      "loss": 0.2833,
      "step": 2920
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.512980269989616e-05,
      "loss": 0.2891,
      "step": 2930
    },
    {
      "epoch": 5.5,
      "learning_rate": 2.5025960539979233e-05,
      "loss": 0.3122,
      "step": 2940
    },
    {
      "epoch": 5.51,
      "learning_rate": 2.4922118380062307e-05,
      "loss": 0.2891,
      "step": 2950
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.481827622014538e-05,
      "loss": 0.3167,
      "step": 2960
    },
    {
      "epoch": 5.55,
      "learning_rate": 2.4714434060228452e-05,
      "loss": 0.3505,
      "step": 2970
    },
    {
      "epoch": 5.57,
      "learning_rate": 2.4610591900311526e-05,
      "loss": 0.3255,
      "step": 2980
    },
    {
      "epoch": 5.59,
      "learning_rate": 2.4506749740394604e-05,
      "loss": 0.2708,
      "step": 2990
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.4402907580477675e-05,
      "loss": 0.3709,
      "step": 3000
    },
    {
      "epoch": 5.63,
      "learning_rate": 2.429906542056075e-05,
      "loss": 0.3623,
      "step": 3010
    },
    {
      "epoch": 5.64,
      "learning_rate": 2.4195223260643823e-05,
      "loss": 0.2488,
      "step": 3020
    },
    {
      "epoch": 5.66,
      "learning_rate": 2.4091381100726894e-05,
      "loss": 0.3012,
      "step": 3030
    },
    {
      "epoch": 5.68,
      "learning_rate": 2.398753894080997e-05,
      "loss": 0.318,
      "step": 3040
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.3883696780893046e-05,
      "loss": 0.3596,
      "step": 3050
    },
    {
      "epoch": 5.72,
      "learning_rate": 2.3779854620976117e-05,
      "loss": 0.2923,
      "step": 3060
    },
    {
      "epoch": 5.74,
      "learning_rate": 2.367601246105919e-05,
      "loss": 0.3677,
      "step": 3070
    },
    {
      "epoch": 5.76,
      "learning_rate": 2.3572170301142265e-05,
      "loss": 0.2508,
      "step": 3080
    },
    {
      "epoch": 5.78,
      "learning_rate": 2.3468328141225336e-05,
      "loss": 0.2464,
      "step": 3090
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.3364485981308414e-05,
      "loss": 0.3364,
      "step": 3100
    },
    {
      "epoch": 5.81,
      "learning_rate": 2.3260643821391488e-05,
      "loss": 0.2931,
      "step": 3110
    },
    {
      "epoch": 5.83,
      "learning_rate": 2.315680166147456e-05,
      "loss": 0.2962,
      "step": 3120
    },
    {
      "epoch": 5.85,
      "learning_rate": 2.3052959501557633e-05,
      "loss": 0.282,
      "step": 3130
    },
    {
      "epoch": 5.87,
      "learning_rate": 2.2949117341640707e-05,
      "loss": 0.2978,
      "step": 3140
    },
    {
      "epoch": 5.89,
      "learning_rate": 2.284527518172378e-05,
      "loss": 0.2524,
      "step": 3150
    },
    {
      "epoch": 5.91,
      "learning_rate": 2.2741433021806856e-05,
      "loss": 0.3384,
      "step": 3160
    },
    {
      "epoch": 5.93,
      "learning_rate": 2.263759086188993e-05,
      "loss": 0.2936,
      "step": 3170
    },
    {
      "epoch": 5.94,
      "learning_rate": 2.2533748701973e-05,
      "loss": 0.2521,
      "step": 3180
    },
    {
      "epoch": 5.96,
      "learning_rate": 2.2429906542056075e-05,
      "loss": 0.3685,
      "step": 3190
    },
    {
      "epoch": 5.98,
      "learning_rate": 2.232606438213915e-05,
      "loss": 0.2391,
      "step": 3200
    },
    {
      "epoch": 6.0,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.2709,
      "step": 3210
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.8735981308411215,
      "eval_loss": 0.37839841842651367,
      "eval_runtime": 92.127,
      "eval_samples_per_second": 46.458,
      "eval_steps_per_second": 5.807,
      "step": 3210
    },
    {
      "epoch": 6.02,
      "learning_rate": 2.2118380062305298e-05,
      "loss": 0.2305,
      "step": 3220
    },
    {
      "epoch": 6.04,
      "learning_rate": 2.2014537902388372e-05,
      "loss": 0.2758,
      "step": 3230
    },
    {
      "epoch": 6.06,
      "learning_rate": 2.1910695742471443e-05,
      "loss": 0.2602,
      "step": 3240
    },
    {
      "epoch": 6.07,
      "learning_rate": 2.1806853582554517e-05,
      "loss": 0.311,
      "step": 3250
    },
    {
      "epoch": 6.09,
      "learning_rate": 2.170301142263759e-05,
      "loss": 0.3315,
      "step": 3260
    },
    {
      "epoch": 6.11,
      "learning_rate": 2.1599169262720665e-05,
      "loss": 0.3078,
      "step": 3270
    },
    {
      "epoch": 6.13,
      "learning_rate": 2.149532710280374e-05,
      "loss": 0.319,
      "step": 3280
    },
    {
      "epoch": 6.15,
      "learning_rate": 2.1391484942886814e-05,
      "loss": 0.3326,
      "step": 3290
    },
    {
      "epoch": 6.17,
      "learning_rate": 2.1287642782969885e-05,
      "loss": 0.2892,
      "step": 3300
    },
    {
      "epoch": 6.19,
      "learning_rate": 2.118380062305296e-05,
      "loss": 0.2669,
      "step": 3310
    },
    {
      "epoch": 6.21,
      "learning_rate": 2.1079958463136033e-05,
      "loss": 0.2829,
      "step": 3320
    },
    {
      "epoch": 6.22,
      "learning_rate": 2.0976116303219107e-05,
      "loss": 0.2434,
      "step": 3330
    },
    {
      "epoch": 6.24,
      "learning_rate": 2.087227414330218e-05,
      "loss": 0.2422,
      "step": 3340
    },
    {
      "epoch": 6.26,
      "learning_rate": 2.0768431983385256e-05,
      "loss": 0.3655,
      "step": 3350
    },
    {
      "epoch": 6.28,
      "learning_rate": 2.066458982346833e-05,
      "loss": 0.2772,
      "step": 3360
    },
    {
      "epoch": 6.3,
      "learning_rate": 2.05607476635514e-05,
      "loss": 0.2542,
      "step": 3370
    },
    {
      "epoch": 6.32,
      "learning_rate": 2.045690550363448e-05,
      "loss": 0.2978,
      "step": 3380
    },
    {
      "epoch": 6.34,
      "learning_rate": 2.0353063343717553e-05,
      "loss": 0.2518,
      "step": 3390
    },
    {
      "epoch": 6.36,
      "learning_rate": 2.0249221183800623e-05,
      "loss": 0.2697,
      "step": 3400
    },
    {
      "epoch": 6.37,
      "learning_rate": 2.0145379023883698e-05,
      "loss": 0.2326,
      "step": 3410
    },
    {
      "epoch": 6.39,
      "learning_rate": 2.0041536863966772e-05,
      "loss": 0.29,
      "step": 3420
    },
    {
      "epoch": 6.41,
      "learning_rate": 1.9937694704049846e-05,
      "loss": 0.2858,
      "step": 3430
    },
    {
      "epoch": 6.43,
      "learning_rate": 1.983385254413292e-05,
      "loss": 0.2665,
      "step": 3440
    },
    {
      "epoch": 6.45,
      "learning_rate": 1.9730010384215995e-05,
      "loss": 0.2583,
      "step": 3450
    },
    {
      "epoch": 6.47,
      "learning_rate": 1.9626168224299065e-05,
      "loss": 0.3213,
      "step": 3460
    },
    {
      "epoch": 6.49,
      "learning_rate": 1.952232606438214e-05,
      "loss": 0.3385,
      "step": 3470
    },
    {
      "epoch": 6.5,
      "learning_rate": 1.9418483904465214e-05,
      "loss": 0.2492,
      "step": 3480
    },
    {
      "epoch": 6.52,
      "learning_rate": 1.9314641744548288e-05,
      "loss": 0.2829,
      "step": 3490
    },
    {
      "epoch": 6.54,
      "learning_rate": 1.9210799584631362e-05,
      "loss": 0.2651,
      "step": 3500
    },
    {
      "epoch": 6.56,
      "learning_rate": 1.9106957424714437e-05,
      "loss": 0.2671,
      "step": 3510
    },
    {
      "epoch": 6.58,
      "learning_rate": 1.9003115264797507e-05,
      "loss": 0.3116,
      "step": 3520
    },
    {
      "epoch": 6.6,
      "learning_rate": 1.889927310488058e-05,
      "loss": 0.3116,
      "step": 3530
    },
    {
      "epoch": 6.62,
      "learning_rate": 1.8795430944963656e-05,
      "loss": 0.2305,
      "step": 3540
    },
    {
      "epoch": 6.64,
      "learning_rate": 1.869158878504673e-05,
      "loss": 0.3035,
      "step": 3550
    },
    {
      "epoch": 6.65,
      "learning_rate": 1.8587746625129804e-05,
      "loss": 0.2388,
      "step": 3560
    },
    {
      "epoch": 6.67,
      "learning_rate": 1.848390446521288e-05,
      "loss": 0.2424,
      "step": 3570
    },
    {
      "epoch": 6.69,
      "learning_rate": 1.838006230529595e-05,
      "loss": 0.219,
      "step": 3580
    },
    {
      "epoch": 6.71,
      "learning_rate": 1.8276220145379024e-05,
      "loss": 0.212,
      "step": 3590
    },
    {
      "epoch": 6.73,
      "learning_rate": 1.8172377985462098e-05,
      "loss": 0.3682,
      "step": 3600
    },
    {
      "epoch": 6.75,
      "learning_rate": 1.8068535825545172e-05,
      "loss": 0.2882,
      "step": 3610
    },
    {
      "epoch": 6.77,
      "learning_rate": 1.7964693665628246e-05,
      "loss": 0.2846,
      "step": 3620
    },
    {
      "epoch": 6.79,
      "learning_rate": 1.786085150571132e-05,
      "loss": 0.276,
      "step": 3630
    },
    {
      "epoch": 6.8,
      "learning_rate": 1.775700934579439e-05,
      "loss": 0.214,
      "step": 3640
    },
    {
      "epoch": 6.82,
      "learning_rate": 1.7653167185877466e-05,
      "loss": 0.2351,
      "step": 3650
    },
    {
      "epoch": 6.84,
      "learning_rate": 1.7549325025960543e-05,
      "loss": 0.2428,
      "step": 3660
    },
    {
      "epoch": 6.86,
      "learning_rate": 1.7445482866043614e-05,
      "loss": 0.2289,
      "step": 3670
    },
    {
      "epoch": 6.88,
      "learning_rate": 1.734164070612669e-05,
      "loss": 0.2087,
      "step": 3680
    },
    {
      "epoch": 6.9,
      "learning_rate": 1.7237798546209763e-05,
      "loss": 0.2492,
      "step": 3690
    },
    {
      "epoch": 6.92,
      "learning_rate": 1.7133956386292833e-05,
      "loss": 0.2855,
      "step": 3700
    },
    {
      "epoch": 6.93,
      "learning_rate": 1.703011422637591e-05,
      "loss": 0.2701,
      "step": 3710
    },
    {
      "epoch": 6.95,
      "learning_rate": 1.6926272066458985e-05,
      "loss": 0.2635,
      "step": 3720
    },
    {
      "epoch": 6.97,
      "learning_rate": 1.6822429906542056e-05,
      "loss": 0.267,
      "step": 3730
    },
    {
      "epoch": 6.99,
      "learning_rate": 1.671858774662513e-05,
      "loss": 0.177,
      "step": 3740
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.8745327102803738,
      "eval_loss": 0.37715595960617065,
      "eval_runtime": 95.1073,
      "eval_samples_per_second": 45.002,
      "eval_steps_per_second": 5.625,
      "step": 3745
    },
    {
      "epoch": 7.01,
      "learning_rate": 1.6614745586708205e-05,
      "loss": 0.307,
      "step": 3750
    },
    {
      "epoch": 7.03,
      "learning_rate": 1.6510903426791275e-05,
      "loss": 0.258,
      "step": 3760
    },
    {
      "epoch": 7.05,
      "learning_rate": 1.6407061266874353e-05,
      "loss": 0.2322,
      "step": 3770
    },
    {
      "epoch": 7.07,
      "learning_rate": 1.6303219106957427e-05,
      "loss": 0.187,
      "step": 3780
    },
    {
      "epoch": 7.08,
      "learning_rate": 1.6199376947040498e-05,
      "loss": 0.2838,
      "step": 3790
    },
    {
      "epoch": 7.1,
      "learning_rate": 1.6095534787123572e-05,
      "loss": 0.2353,
      "step": 3800
    },
    {
      "epoch": 7.12,
      "learning_rate": 1.5991692627206647e-05,
      "loss": 0.2661,
      "step": 3810
    },
    {
      "epoch": 7.14,
      "learning_rate": 1.588785046728972e-05,
      "loss": 0.1917,
      "step": 3820
    },
    {
      "epoch": 7.16,
      "learning_rate": 1.5784008307372795e-05,
      "loss": 0.1978,
      "step": 3830
    },
    {
      "epoch": 7.18,
      "learning_rate": 1.568016614745587e-05,
      "loss": 0.2688,
      "step": 3840
    },
    {
      "epoch": 7.2,
      "learning_rate": 1.557632398753894e-05,
      "loss": 0.2383,
      "step": 3850
    },
    {
      "epoch": 7.21,
      "learning_rate": 1.5472481827622014e-05,
      "loss": 0.2874,
      "step": 3860
    },
    {
      "epoch": 7.23,
      "learning_rate": 1.536863966770509e-05,
      "loss": 0.265,
      "step": 3870
    },
    {
      "epoch": 7.25,
      "learning_rate": 1.5264797507788163e-05,
      "loss": 0.1821,
      "step": 3880
    },
    {
      "epoch": 7.27,
      "learning_rate": 1.5160955347871237e-05,
      "loss": 0.193,
      "step": 3890
    },
    {
      "epoch": 7.29,
      "learning_rate": 1.5057113187954311e-05,
      "loss": 0.2681,
      "step": 3900
    },
    {
      "epoch": 7.31,
      "learning_rate": 1.4953271028037382e-05,
      "loss": 0.2406,
      "step": 3910
    },
    {
      "epoch": 7.33,
      "learning_rate": 1.4849428868120458e-05,
      "loss": 0.3214,
      "step": 3920
    },
    {
      "epoch": 7.35,
      "learning_rate": 1.4745586708203532e-05,
      "loss": 0.2521,
      "step": 3930
    },
    {
      "epoch": 7.36,
      "learning_rate": 1.4641744548286603e-05,
      "loss": 0.3427,
      "step": 3940
    },
    {
      "epoch": 7.38,
      "learning_rate": 1.4537902388369679e-05,
      "loss": 0.3272,
      "step": 3950
    },
    {
      "epoch": 7.4,
      "learning_rate": 1.4434060228452753e-05,
      "loss": 0.2336,
      "step": 3960
    },
    {
      "epoch": 7.42,
      "learning_rate": 1.4330218068535826e-05,
      "loss": 0.2209,
      "step": 3970
    },
    {
      "epoch": 7.44,
      "learning_rate": 1.42263759086189e-05,
      "loss": 0.222,
      "step": 3980
    },
    {
      "epoch": 7.46,
      "learning_rate": 1.4122533748701974e-05,
      "loss": 0.236,
      "step": 3990
    },
    {
      "epoch": 7.48,
      "learning_rate": 1.4018691588785047e-05,
      "loss": 0.2875,
      "step": 4000
    },
    {
      "epoch": 7.5,
      "learning_rate": 1.3914849428868121e-05,
      "loss": 0.2098,
      "step": 4010
    },
    {
      "epoch": 7.51,
      "learning_rate": 1.3811007268951195e-05,
      "loss": 0.324,
      "step": 4020
    },
    {
      "epoch": 7.53,
      "learning_rate": 1.3707165109034268e-05,
      "loss": 0.2038,
      "step": 4030
    },
    {
      "epoch": 7.55,
      "learning_rate": 1.3603322949117342e-05,
      "loss": 0.271,
      "step": 4040
    },
    {
      "epoch": 7.57,
      "learning_rate": 1.3499480789200416e-05,
      "loss": 0.296,
      "step": 4050
    },
    {
      "epoch": 7.59,
      "learning_rate": 1.3395638629283489e-05,
      "loss": 0.2557,
      "step": 4060
    },
    {
      "epoch": 7.61,
      "learning_rate": 1.3291796469366563e-05,
      "loss": 0.2788,
      "step": 4070
    },
    {
      "epoch": 7.63,
      "learning_rate": 1.3187954309449637e-05,
      "loss": 0.2684,
      "step": 4080
    },
    {
      "epoch": 7.64,
      "learning_rate": 1.308411214953271e-05,
      "loss": 0.2447,
      "step": 4090
    },
    {
      "epoch": 7.66,
      "learning_rate": 1.2980269989615784e-05,
      "loss": 0.287,
      "step": 4100
    },
    {
      "epoch": 7.68,
      "learning_rate": 1.2876427829698858e-05,
      "loss": 0.2186,
      "step": 4110
    },
    {
      "epoch": 7.7,
      "learning_rate": 1.277258566978193e-05,
      "loss": 0.1978,
      "step": 4120
    },
    {
      "epoch": 7.72,
      "learning_rate": 1.2668743509865005e-05,
      "loss": 0.2698,
      "step": 4130
    },
    {
      "epoch": 7.74,
      "learning_rate": 1.256490134994808e-05,
      "loss": 0.1624,
      "step": 4140
    },
    {
      "epoch": 7.76,
      "learning_rate": 1.2461059190031153e-05,
      "loss": 0.2591,
      "step": 4150
    },
    {
      "epoch": 7.78,
      "learning_rate": 1.2357217030114226e-05,
      "loss": 0.2035,
      "step": 4160
    },
    {
      "epoch": 7.79,
      "learning_rate": 1.2253374870197302e-05,
      "loss": 0.2285,
      "step": 4170
    },
    {
      "epoch": 7.81,
      "learning_rate": 1.2149532710280374e-05,
      "loss": 0.2257,
      "step": 4180
    },
    {
      "epoch": 7.83,
      "learning_rate": 1.2045690550363447e-05,
      "loss": 0.2555,
      "step": 4190
    },
    {
      "epoch": 7.85,
      "learning_rate": 1.1941848390446523e-05,
      "loss": 0.2192,
      "step": 4200
    },
    {
      "epoch": 7.87,
      "learning_rate": 1.1838006230529595e-05,
      "loss": 0.2169,
      "step": 4210
    },
    {
      "epoch": 7.89,
      "learning_rate": 1.1734164070612668e-05,
      "loss": 0.2914,
      "step": 4220
    },
    {
      "epoch": 7.91,
      "learning_rate": 1.1630321910695744e-05,
      "loss": 0.253,
      "step": 4230
    },
    {
      "epoch": 7.93,
      "learning_rate": 1.1526479750778816e-05,
      "loss": 0.2067,
      "step": 4240
    },
    {
      "epoch": 7.94,
      "learning_rate": 1.142263759086189e-05,
      "loss": 0.1995,
      "step": 4250
    },
    {
      "epoch": 7.96,
      "learning_rate": 1.1318795430944965e-05,
      "loss": 0.2902,
      "step": 4260
    },
    {
      "epoch": 7.98,
      "learning_rate": 1.1214953271028037e-05,
      "loss": 0.2051,
      "step": 4270
    },
    {
      "epoch": 8.0,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.2409,
      "step": 4280
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.8761682242990654,
      "eval_loss": 0.38753223419189453,
      "eval_runtime": 91.8924,
      "eval_samples_per_second": 46.576,
      "eval_steps_per_second": 5.822,
      "step": 4280
    },
    {
      "epoch": 8.02,
      "learning_rate": 1.1007268951194186e-05,
      "loss": 0.2436,
      "step": 4290
    },
    {
      "epoch": 8.04,
      "learning_rate": 1.0903426791277258e-05,
      "loss": 0.2468,
      "step": 4300
    },
    {
      "epoch": 8.06,
      "learning_rate": 1.0799584631360333e-05,
      "loss": 0.168,
      "step": 4310
    },
    {
      "epoch": 8.07,
      "learning_rate": 1.0695742471443407e-05,
      "loss": 0.2093,
      "step": 4320
    },
    {
      "epoch": 8.09,
      "learning_rate": 1.059190031152648e-05,
      "loss": 0.1616,
      "step": 4330
    },
    {
      "epoch": 8.11,
      "learning_rate": 1.0488058151609554e-05,
      "loss": 0.1912,
      "step": 4340
    },
    {
      "epoch": 8.13,
      "learning_rate": 1.0384215991692628e-05,
      "loss": 0.3157,
      "step": 4350
    },
    {
      "epoch": 8.15,
      "learning_rate": 1.02803738317757e-05,
      "loss": 0.2213,
      "step": 4360
    },
    {
      "epoch": 8.17,
      "learning_rate": 1.0176531671858776e-05,
      "loss": 0.2473,
      "step": 4370
    },
    {
      "epoch": 8.19,
      "learning_rate": 1.0072689511941849e-05,
      "loss": 0.2155,
      "step": 4380
    },
    {
      "epoch": 8.21,
      "learning_rate": 9.968847352024923e-06,
      "loss": 0.209,
      "step": 4390
    },
    {
      "epoch": 8.22,
      "learning_rate": 9.865005192107997e-06,
      "loss": 0.2552,
      "step": 4400
    },
    {
      "epoch": 8.24,
      "learning_rate": 9.76116303219107e-06,
      "loss": 0.2469,
      "step": 4410
    },
    {
      "epoch": 8.26,
      "learning_rate": 9.657320872274144e-06,
      "loss": 0.2195,
      "step": 4420
    },
    {
      "epoch": 8.28,
      "learning_rate": 9.553478712357218e-06,
      "loss": 0.2094,
      "step": 4430
    },
    {
      "epoch": 8.3,
      "learning_rate": 9.44963655244029e-06,
      "loss": 0.2593,
      "step": 4440
    },
    {
      "epoch": 8.32,
      "learning_rate": 9.345794392523365e-06,
      "loss": 0.254,
      "step": 4450
    },
    {
      "epoch": 8.34,
      "learning_rate": 9.24195223260644e-06,
      "loss": 0.1353,
      "step": 4460
    },
    {
      "epoch": 8.36,
      "learning_rate": 9.138110072689512e-06,
      "loss": 0.2152,
      "step": 4470
    },
    {
      "epoch": 8.37,
      "learning_rate": 9.034267912772586e-06,
      "loss": 0.2258,
      "step": 4480
    },
    {
      "epoch": 8.39,
      "learning_rate": 8.93042575285566e-06,
      "loss": 0.1593,
      "step": 4490
    },
    {
      "epoch": 8.41,
      "learning_rate": 8.826583592938733e-06,
      "loss": 0.1939,
      "step": 4500
    },
    {
      "epoch": 8.43,
      "learning_rate": 8.722741433021807e-06,
      "loss": 0.2717,
      "step": 4510
    },
    {
      "epoch": 8.45,
      "learning_rate": 8.618899273104881e-06,
      "loss": 0.2263,
      "step": 4520
    },
    {
      "epoch": 8.47,
      "learning_rate": 8.515057113187956e-06,
      "loss": 0.281,
      "step": 4530
    },
    {
      "epoch": 8.49,
      "learning_rate": 8.411214953271028e-06,
      "loss": 0.2438,
      "step": 4540
    },
    {
      "epoch": 8.5,
      "learning_rate": 8.307372793354102e-06,
      "loss": 0.1487,
      "step": 4550
    },
    {
      "epoch": 8.52,
      "learning_rate": 8.203530633437177e-06,
      "loss": 0.1811,
      "step": 4560
    },
    {
      "epoch": 8.54,
      "learning_rate": 8.099688473520249e-06,
      "loss": 0.2803,
      "step": 4570
    },
    {
      "epoch": 8.56,
      "learning_rate": 7.995846313603323e-06,
      "loss": 0.2339,
      "step": 4580
    },
    {
      "epoch": 8.58,
      "learning_rate": 7.892004153686398e-06,
      "loss": 0.2617,
      "step": 4590
    },
    {
      "epoch": 8.6,
      "learning_rate": 7.78816199376947e-06,
      "loss": 0.3019,
      "step": 4600
    },
    {
      "epoch": 8.62,
      "learning_rate": 7.684319833852544e-06,
      "loss": 0.2083,
      "step": 4610
    },
    {
      "epoch": 8.64,
      "learning_rate": 7.5804776739356185e-06,
      "loss": 0.24,
      "step": 4620
    },
    {
      "epoch": 8.65,
      "learning_rate": 7.476635514018691e-06,
      "loss": 0.2378,
      "step": 4630
    },
    {
      "epoch": 8.67,
      "learning_rate": 7.372793354101766e-06,
      "loss": 0.2475,
      "step": 4640
    },
    {
      "epoch": 8.69,
      "learning_rate": 7.2689511941848395e-06,
      "loss": 0.197,
      "step": 4650
    },
    {
      "epoch": 8.71,
      "learning_rate": 7.165109034267913e-06,
      "loss": 0.2302,
      "step": 4660
    },
    {
      "epoch": 8.73,
      "learning_rate": 7.061266874350987e-06,
      "loss": 0.2186,
      "step": 4670
    },
    {
      "epoch": 8.75,
      "learning_rate": 6.9574247144340605e-06,
      "loss": 0.2172,
      "step": 4680
    },
    {
      "epoch": 8.77,
      "learning_rate": 6.853582554517134e-06,
      "loss": 0.2189,
      "step": 4690
    },
    {
      "epoch": 8.79,
      "learning_rate": 6.749740394600208e-06,
      "loss": 0.2137,
      "step": 4700
    },
    {
      "epoch": 8.8,
      "learning_rate": 6.6458982346832815e-06,
      "loss": 0.2259,
      "step": 4710
    },
    {
      "epoch": 8.82,
      "learning_rate": 6.542056074766355e-06,
      "loss": 0.15,
      "step": 4720
    },
    {
      "epoch": 8.84,
      "learning_rate": 6.438213914849429e-06,
      "loss": 0.2622,
      "step": 4730
    },
    {
      "epoch": 8.86,
      "learning_rate": 6.3343717549325025e-06,
      "loss": 0.2009,
      "step": 4740
    },
    {
      "epoch": 8.88,
      "learning_rate": 6.230529595015577e-06,
      "loss": 0.2095,
      "step": 4750
    },
    {
      "epoch": 8.9,
      "learning_rate": 6.126687435098651e-06,
      "loss": 0.1766,
      "step": 4760
    },
    {
      "epoch": 8.92,
      "learning_rate": 6.0228452751817235e-06,
      "loss": 0.1982,
      "step": 4770
    },
    {
      "epoch": 8.93,
      "learning_rate": 5.919003115264798e-06,
      "loss": 0.2287,
      "step": 4780
    },
    {
      "epoch": 8.95,
      "learning_rate": 5.815160955347872e-06,
      "loss": 0.266,
      "step": 4790
    },
    {
      "epoch": 8.97,
      "learning_rate": 5.711318795430945e-06,
      "loss": 0.2196,
      "step": 4800
    },
    {
      "epoch": 8.99,
      "learning_rate": 5.607476635514019e-06,
      "loss": 0.1929,
      "step": 4810
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.8707943925233644,
      "eval_loss": 0.3915020227432251,
      "eval_runtime": 93.367,
      "eval_samples_per_second": 45.841,
      "eval_steps_per_second": 5.73,
      "step": 4815
    },
    {
      "epoch": 9.01,
      "learning_rate": 5.503634475597093e-06,
      "loss": 0.2137,
      "step": 4820
    },
    {
      "epoch": 9.03,
      "learning_rate": 5.399792315680166e-06,
      "loss": 0.1994,
      "step": 4830
    },
    {
      "epoch": 9.05,
      "learning_rate": 5.29595015576324e-06,
      "loss": 0.2243,
      "step": 4840
    },
    {
      "epoch": 9.07,
      "learning_rate": 5.192107995846314e-06,
      "loss": 0.282,
      "step": 4850
    },
    {
      "epoch": 9.08,
      "learning_rate": 5.088265835929388e-06,
      "loss": 0.1425,
      "step": 4860
    },
    {
      "epoch": 9.1,
      "learning_rate": 4.9844236760124615e-06,
      "loss": 0.2213,
      "step": 4870
    },
    {
      "epoch": 9.12,
      "learning_rate": 4.880581516095535e-06,
      "loss": 0.1939,
      "step": 4880
    },
    {
      "epoch": 9.14,
      "learning_rate": 4.776739356178609e-06,
      "loss": 0.1753,
      "step": 4890
    },
    {
      "epoch": 9.16,
      "learning_rate": 4.6728971962616825e-06,
      "loss": 0.1758,
      "step": 4900
    },
    {
      "epoch": 9.18,
      "learning_rate": 4.569055036344756e-06,
      "loss": 0.1597,
      "step": 4910
    },
    {
      "epoch": 9.2,
      "learning_rate": 4.46521287642783e-06,
      "loss": 0.2269,
      "step": 4920
    },
    {
      "epoch": 9.21,
      "learning_rate": 4.3613707165109035e-06,
      "loss": 0.2723,
      "step": 4930
    },
    {
      "epoch": 9.23,
      "learning_rate": 4.257528556593978e-06,
      "loss": 0.2012,
      "step": 4940
    },
    {
      "epoch": 9.25,
      "learning_rate": 4.153686396677051e-06,
      "loss": 0.1863,
      "step": 4950
    },
    {
      "epoch": 9.27,
      "learning_rate": 4.0498442367601245e-06,
      "loss": 0.2029,
      "step": 4960
    },
    {
      "epoch": 9.29,
      "learning_rate": 3.946002076843199e-06,
      "loss": 0.2067,
      "step": 4970
    },
    {
      "epoch": 9.31,
      "learning_rate": 3.842159916926272e-06,
      "loss": 0.2359,
      "step": 4980
    },
    {
      "epoch": 9.33,
      "learning_rate": 3.7383177570093455e-06,
      "loss": 0.2398,
      "step": 4990
    },
    {
      "epoch": 9.35,
      "learning_rate": 3.6344755970924198e-06,
      "loss": 0.2233,
      "step": 5000
    },
    {
      "epoch": 9.36,
      "learning_rate": 3.5306334371754936e-06,
      "loss": 0.1934,
      "step": 5010
    },
    {
      "epoch": 9.38,
      "learning_rate": 3.426791277258567e-06,
      "loss": 0.2625,
      "step": 5020
    },
    {
      "epoch": 9.4,
      "learning_rate": 3.3229491173416407e-06,
      "loss": 0.2548,
      "step": 5030
    },
    {
      "epoch": 9.42,
      "learning_rate": 3.2191069574247146e-06,
      "loss": 0.1718,
      "step": 5040
    },
    {
      "epoch": 9.44,
      "learning_rate": 3.1152647975077884e-06,
      "loss": 0.1668,
      "step": 5050
    },
    {
      "epoch": 9.46,
      "learning_rate": 3.0114226375908617e-06,
      "loss": 0.2202,
      "step": 5060
    },
    {
      "epoch": 9.48,
      "learning_rate": 2.907580477673936e-06,
      "loss": 0.2091,
      "step": 5070
    },
    {
      "epoch": 9.5,
      "learning_rate": 2.8037383177570094e-06,
      "loss": 0.2213,
      "step": 5080
    },
    {
      "epoch": 9.51,
      "learning_rate": 2.699896157840083e-06,
      "loss": 0.2857,
      "step": 5090
    },
    {
      "epoch": 9.53,
      "learning_rate": 2.596053997923157e-06,
      "loss": 0.2209,
      "step": 5100
    },
    {
      "epoch": 9.55,
      "learning_rate": 2.4922118380062308e-06,
      "loss": 0.2341,
      "step": 5110
    },
    {
      "epoch": 9.57,
      "learning_rate": 2.3883696780893046e-06,
      "loss": 0.2111,
      "step": 5120
    },
    {
      "epoch": 9.59,
      "learning_rate": 2.284527518172378e-06,
      "loss": 0.1869,
      "step": 5130
    },
    {
      "epoch": 9.61,
      "learning_rate": 2.1806853582554518e-06,
      "loss": 0.2229,
      "step": 5140
    },
    {
      "epoch": 9.63,
      "learning_rate": 2.0768431983385256e-06,
      "loss": 0.2251,
      "step": 5150
    },
    {
      "epoch": 9.64,
      "learning_rate": 1.9730010384215994e-06,
      "loss": 0.183,
      "step": 5160
    },
    {
      "epoch": 9.66,
      "learning_rate": 1.8691588785046728e-06,
      "loss": 0.1606,
      "step": 5170
    },
    {
      "epoch": 9.68,
      "learning_rate": 1.7653167185877468e-06,
      "loss": 0.2067,
      "step": 5180
    },
    {
      "epoch": 9.7,
      "learning_rate": 1.6614745586708204e-06,
      "loss": 0.2346,
      "step": 5190
    },
    {
      "epoch": 9.72,
      "learning_rate": 1.5576323987538942e-06,
      "loss": 0.2161,
      "step": 5200
    },
    {
      "epoch": 9.74,
      "learning_rate": 1.453790238836968e-06,
      "loss": 0.2893,
      "step": 5210
    },
    {
      "epoch": 9.76,
      "learning_rate": 1.3499480789200416e-06,
      "loss": 0.2196,
      "step": 5220
    },
    {
      "epoch": 9.78,
      "learning_rate": 1.2461059190031154e-06,
      "loss": 0.2035,
      "step": 5230
    },
    {
      "epoch": 9.79,
      "learning_rate": 1.142263759086189e-06,
      "loss": 0.1541,
      "step": 5240
    },
    {
      "epoch": 9.81,
      "learning_rate": 1.0384215991692628e-06,
      "loss": 0.2105,
      "step": 5250
    },
    {
      "epoch": 9.83,
      "learning_rate": 9.345794392523364e-07,
      "loss": 0.2065,
      "step": 5260
    },
    {
      "epoch": 9.85,
      "learning_rate": 8.307372793354102e-07,
      "loss": 0.161,
      "step": 5270
    },
    {
      "epoch": 9.87,
      "learning_rate": 7.26895119418484e-07,
      "loss": 0.2285,
      "step": 5280
    },
    {
      "epoch": 9.89,
      "learning_rate": 6.230529595015577e-07,
      "loss": 0.2051,
      "step": 5290
    },
    {
      "epoch": 9.91,
      "learning_rate": 5.192107995846314e-07,
      "loss": 0.2291,
      "step": 5300
    },
    {
      "epoch": 9.93,
      "learning_rate": 4.153686396677051e-07,
      "loss": 0.2071,
      "step": 5310
    },
    {
      "epoch": 9.94,
      "learning_rate": 3.1152647975077885e-07,
      "loss": 0.2055,
      "step": 5320
    },
    {
      "epoch": 9.96,
      "learning_rate": 2.0768431983385255e-07,
      "loss": 0.1433,
      "step": 5330
    },
    {
      "epoch": 9.98,
      "learning_rate": 1.0384215991692627e-07,
      "loss": 0.2493,
      "step": 5340
    },
    {
      "epoch": 10.0,
      "learning_rate": 0.0,
      "loss": 0.1877,
      "step": 5350
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.8714953271028038,
      "eval_loss": 0.38807550072669983,
      "eval_runtime": 92.2965,
      "eval_samples_per_second": 46.372,
      "eval_steps_per_second": 5.797,
      "step": 5350
    },
    {
      "epoch": 10.0,
      "step": 5350,
      "total_flos": 1.3264660513609667e+19,
      "train_loss": 0.36025063641717503,
      "train_runtime": 7147.2488,
      "train_samples_per_second": 23.949,
      "train_steps_per_second": 0.749
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.8761682242990654,
      "eval_loss": 0.38753223419189453,
      "eval_runtime": 92.7247,
      "eval_samples_per_second": 46.158,
      "eval_steps_per_second": 5.77,
      "step": 5350
    }
  ],
  "max_steps": 5350,
  "num_train_epochs": 10,
  "total_flos": 1.3264660513609667e+19,
  "trial_name": null,
  "trial_params": null
}