{
  "best_metric": 0.989844278943805,
  "best_model_checkpoint": "salon_image_classifier_v1_convnext/checkpoint-1035",
  "epoch": 4.981949458483754,
  "eval_steps": 500,
  "global_step": 1035,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "grad_norm": 6.277402877807617,
      "learning_rate": 2.403846153846154e-06,
      "loss": 1.4624,
      "step": 5
    },
    {
      "epoch": 0.05,
      "grad_norm": 5.703274726867676,
      "learning_rate": 4.807692307692308e-06,
      "loss": 1.3971,
      "step": 10
    },
    {
      "epoch": 0.07,
      "grad_norm": 7.412254810333252,
      "learning_rate": 7.211538461538461e-06,
      "loss": 1.3073,
      "step": 15
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.874199867248535,
      "learning_rate": 9.615384615384616e-06,
      "loss": 1.1877,
      "step": 20
    },
    {
      "epoch": 0.12,
      "grad_norm": 4.8683342933654785,
      "learning_rate": 1.2019230769230771e-05,
      "loss": 1.0259,
      "step": 25
    },
    {
      "epoch": 0.14,
      "grad_norm": 4.822791576385498,
      "learning_rate": 1.4423076923076923e-05,
      "loss": 0.8557,
      "step": 30
    },
    {
      "epoch": 0.17,
      "grad_norm": 4.416042804718018,
      "learning_rate": 1.682692307692308e-05,
      "loss": 0.7254,
      "step": 35
    },
    {
      "epoch": 0.19,
      "grad_norm": 3.968479633331299,
      "learning_rate": 1.923076923076923e-05,
      "loss": 0.5629,
      "step": 40
    },
    {
      "epoch": 0.22,
      "grad_norm": 3.736450433731079,
      "learning_rate": 2.1634615384615387e-05,
      "loss": 0.4541,
      "step": 45
    },
    {
      "epoch": 0.24,
      "grad_norm": 3.005037307739258,
      "learning_rate": 2.4038461538461542e-05,
      "loss": 0.3057,
      "step": 50
    },
    {
      "epoch": 0.26,
      "grad_norm": 4.574312686920166,
      "learning_rate": 2.6442307692307694e-05,
      "loss": 0.2917,
      "step": 55
    },
    {
      "epoch": 0.29,
      "grad_norm": 5.142052173614502,
      "learning_rate": 2.8846153846153845e-05,
      "loss": 0.2297,
      "step": 60
    },
    {
      "epoch": 0.31,
      "grad_norm": 3.3775789737701416,
      "learning_rate": 3.125e-05,
      "loss": 0.2004,
      "step": 65
    },
    {
      "epoch": 0.34,
      "grad_norm": 3.128685712814331,
      "learning_rate": 3.365384615384616e-05,
      "loss": 0.1851,
      "step": 70
    },
    {
      "epoch": 0.36,
      "grad_norm": 5.491213798522949,
      "learning_rate": 3.605769230769231e-05,
      "loss": 0.1956,
      "step": 75
    },
    {
      "epoch": 0.39,
      "grad_norm": 2.380528688430786,
      "learning_rate": 3.846153846153846e-05,
      "loss": 0.161,
      "step": 80
    },
    {
      "epoch": 0.41,
      "grad_norm": 5.325184345245361,
      "learning_rate": 4.0865384615384615e-05,
      "loss": 0.1748,
      "step": 85
    },
    {
      "epoch": 0.43,
      "grad_norm": 3.9706196784973145,
      "learning_rate": 4.326923076923077e-05,
      "loss": 0.1487,
      "step": 90
    },
    {
      "epoch": 0.46,
      "grad_norm": 4.022750377655029,
      "learning_rate": 4.5673076923076925e-05,
      "loss": 0.1339,
      "step": 95
    },
    {
      "epoch": 0.48,
      "grad_norm": 7.611146450042725,
      "learning_rate": 4.8076923076923084e-05,
      "loss": 0.1544,
      "step": 100
    },
    {
      "epoch": 0.51,
      "grad_norm": 9.541460990905762,
      "learning_rate": 4.9946294307196566e-05,
      "loss": 0.1224,
      "step": 105
    },
    {
      "epoch": 0.53,
      "grad_norm": 5.275872230529785,
      "learning_rate": 4.967776584317938e-05,
      "loss": 0.1511,
      "step": 110
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.8402061462402344,
      "learning_rate": 4.940923737916219e-05,
      "loss": 0.1081,
      "step": 115
    },
    {
      "epoch": 0.58,
      "grad_norm": 6.35424280166626,
      "learning_rate": 4.9140708915145005e-05,
      "loss": 0.1288,
      "step": 120
    },
    {
      "epoch": 0.6,
      "grad_norm": 6.0777435302734375,
      "learning_rate": 4.887218045112782e-05,
      "loss": 0.1277,
      "step": 125
    },
    {
      "epoch": 0.63,
      "grad_norm": 3.709442377090454,
      "learning_rate": 4.860365198711064e-05,
      "loss": 0.136,
      "step": 130
    },
    {
      "epoch": 0.65,
      "grad_norm": 6.1887593269348145,
      "learning_rate": 4.833512352309345e-05,
      "loss": 0.1758,
      "step": 135
    },
    {
      "epoch": 0.67,
      "grad_norm": 4.514546871185303,
      "learning_rate": 4.806659505907626e-05,
      "loss": 0.156,
      "step": 140
    },
    {
      "epoch": 0.7,
      "grad_norm": 5.032735824584961,
      "learning_rate": 4.7798066595059076e-05,
      "loss": 0.1593,
      "step": 145
    },
    {
      "epoch": 0.72,
      "grad_norm": 9.502847671508789,
      "learning_rate": 4.7529538131041896e-05,
      "loss": 0.2014,
      "step": 150
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.6441538333892822,
      "learning_rate": 4.726100966702471e-05,
      "loss": 0.1331,
      "step": 155
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.8725085258483887,
      "learning_rate": 4.699248120300752e-05,
      "loss": 0.1261,
      "step": 160
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.6660351753234863,
      "learning_rate": 4.6723952738990334e-05,
      "loss": 0.1097,
      "step": 165
    },
    {
      "epoch": 0.82,
      "grad_norm": 8.11793327331543,
      "learning_rate": 4.645542427497315e-05,
      "loss": 0.0933,
      "step": 170
    },
    {
      "epoch": 0.84,
      "grad_norm": 3.4776053428649902,
      "learning_rate": 4.618689581095597e-05,
      "loss": 0.1543,
      "step": 175
    },
    {
      "epoch": 0.87,
      "grad_norm": 3.1535816192626953,
      "learning_rate": 4.591836734693878e-05,
      "loss": 0.1245,
      "step": 180
    },
    {
      "epoch": 0.89,
      "grad_norm": 4.248240947723389,
      "learning_rate": 4.564983888292159e-05,
      "loss": 0.1417,
      "step": 185
    },
    {
      "epoch": 0.91,
      "grad_norm": 4.858673572540283,
      "learning_rate": 4.5381310418904406e-05,
      "loss": 0.1382,
      "step": 190
    },
    {
      "epoch": 0.94,
      "grad_norm": 3.6954901218414307,
      "learning_rate": 4.511278195488722e-05,
      "loss": 0.1133,
      "step": 195
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.9366323947906494,
      "learning_rate": 4.484425349087004e-05,
      "loss": 0.1037,
      "step": 200
    },
    {
      "epoch": 0.99,
      "grad_norm": 3.420692205429077,
      "learning_rate": 4.457572502685285e-05,
      "loss": 0.1269,
      "step": 205
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9834123222748815,
      "eval_loss": 0.050108980387449265,
      "eval_runtime": 6.0814,
      "eval_samples_per_second": 485.741,
      "eval_steps_per_second": 15.292,
      "step": 207
    },
    {
      "epoch": 1.01,
      "grad_norm": 3.8513619899749756,
      "learning_rate": 4.4307196562835664e-05,
      "loss": 0.1179,
      "step": 210
    },
    {
      "epoch": 1.03,
      "grad_norm": 3.5678696632385254,
      "learning_rate": 4.403866809881848e-05,
      "loss": 0.1067,
      "step": 215
    },
    {
      "epoch": 1.06,
      "grad_norm": 3.045541286468506,
      "learning_rate": 4.3770139634801297e-05,
      "loss": 0.1014,
      "step": 220
    },
    {
      "epoch": 1.08,
      "grad_norm": 2.9802796840667725,
      "learning_rate": 4.350161117078411e-05,
      "loss": 0.0839,
      "step": 225
    },
    {
      "epoch": 1.11,
      "grad_norm": 2.974304437637329,
      "learning_rate": 4.323308270676692e-05,
      "loss": 0.096,
      "step": 230
    },
    {
      "epoch": 1.13,
      "grad_norm": 3.204138994216919,
      "learning_rate": 4.2964554242749735e-05,
      "loss": 0.088,
      "step": 235
    },
    {
      "epoch": 1.16,
      "grad_norm": 5.182232856750488,
      "learning_rate": 4.269602577873255e-05,
      "loss": 0.0926,
      "step": 240
    },
    {
      "epoch": 1.18,
      "grad_norm": 2.830148458480835,
      "learning_rate": 4.242749731471536e-05,
      "loss": 0.0915,
      "step": 245
    },
    {
      "epoch": 1.2,
      "grad_norm": 5.067986011505127,
      "learning_rate": 4.215896885069818e-05,
      "loss": 0.1051,
      "step": 250
    },
    {
      "epoch": 1.23,
      "grad_norm": 3.488647937774658,
      "learning_rate": 4.1890440386680994e-05,
      "loss": 0.0948,
      "step": 255
    },
    {
      "epoch": 1.25,
      "grad_norm": 2.4556047916412354,
      "learning_rate": 4.1621911922663806e-05,
      "loss": 0.0915,
      "step": 260
    },
    {
      "epoch": 1.28,
      "grad_norm": 4.81022834777832,
      "learning_rate": 4.135338345864662e-05,
      "loss": 0.1068,
      "step": 265
    },
    {
      "epoch": 1.3,
      "grad_norm": 4.263596057891846,
      "learning_rate": 4.108485499462943e-05,
      "loss": 0.106,
      "step": 270
    },
    {
      "epoch": 1.32,
      "grad_norm": 2.286785840988159,
      "learning_rate": 4.0816326530612245e-05,
      "loss": 0.0784,
      "step": 275
    },
    {
      "epoch": 1.35,
      "grad_norm": 3.0982165336608887,
      "learning_rate": 4.054779806659506e-05,
      "loss": 0.0774,
      "step": 280
    },
    {
      "epoch": 1.37,
      "grad_norm": 3.3483657836914062,
      "learning_rate": 4.027926960257787e-05,
      "loss": 0.0732,
      "step": 285
    },
    {
      "epoch": 1.4,
      "grad_norm": 3.933288335800171,
      "learning_rate": 4.0010741138560684e-05,
      "loss": 0.086,
      "step": 290
    },
    {
      "epoch": 1.42,
      "grad_norm": 3.418642282485962,
      "learning_rate": 3.97422126745435e-05,
      "loss": 0.0803,
      "step": 295
    },
    {
      "epoch": 1.44,
      "grad_norm": 3.975381374359131,
      "learning_rate": 3.9473684210526316e-05,
      "loss": 0.1137,
      "step": 300
    },
    {
      "epoch": 1.47,
      "grad_norm": 2.6510424613952637,
      "learning_rate": 3.920515574650913e-05,
      "loss": 0.1206,
      "step": 305
    },
    {
      "epoch": 1.49,
      "grad_norm": 2.9062960147857666,
      "learning_rate": 3.893662728249194e-05,
      "loss": 0.1042,
      "step": 310
    },
    {
      "epoch": 1.52,
      "grad_norm": 2.369260549545288,
      "learning_rate": 3.866809881847476e-05,
      "loss": 0.0856,
      "step": 315
    },
    {
      "epoch": 1.54,
      "grad_norm": 5.03189754486084,
      "learning_rate": 3.8399570354457575e-05,
      "loss": 0.0831,
      "step": 320
    },
    {
      "epoch": 1.56,
      "grad_norm": 4.001619338989258,
      "learning_rate": 3.813104189044039e-05,
      "loss": 0.0766,
      "step": 325
    },
    {
      "epoch": 1.59,
      "grad_norm": 3.2500596046447754,
      "learning_rate": 3.78625134264232e-05,
      "loss": 0.0881,
      "step": 330
    },
    {
      "epoch": 1.61,
      "grad_norm": 1.553835391998291,
      "learning_rate": 3.759398496240601e-05,
      "loss": 0.0714,
      "step": 335
    },
    {
      "epoch": 1.64,
      "grad_norm": 4.914017200469971,
      "learning_rate": 3.732545649838883e-05,
      "loss": 0.0944,
      "step": 340
    },
    {
      "epoch": 1.66,
      "grad_norm": 2.7451980113983154,
      "learning_rate": 3.7056928034371646e-05,
      "loss": 0.0733,
      "step": 345
    },
    {
      "epoch": 1.68,
      "grad_norm": 4.39937162399292,
      "learning_rate": 3.678839957035446e-05,
      "loss": 0.1127,
      "step": 350
    },
    {
      "epoch": 1.71,
      "grad_norm": 3.2537283897399902,
      "learning_rate": 3.651987110633727e-05,
      "loss": 0.1236,
      "step": 355
    },
    {
      "epoch": 1.73,
      "grad_norm": 4.104055881500244,
      "learning_rate": 3.6251342642320084e-05,
      "loss": 0.114,
      "step": 360
    },
    {
      "epoch": 1.76,
      "grad_norm": 2.0853748321533203,
      "learning_rate": 3.5982814178302904e-05,
      "loss": 0.0885,
      "step": 365
    },
    {
      "epoch": 1.78,
      "grad_norm": 2.1333792209625244,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.0901,
      "step": 370
    },
    {
      "epoch": 1.81,
      "grad_norm": 2.9660935401916504,
      "learning_rate": 3.544575725026853e-05,
      "loss": 0.0753,
      "step": 375
    },
    {
      "epoch": 1.83,
      "grad_norm": 1.7010711431503296,
      "learning_rate": 3.517722878625134e-05,
      "loss": 0.094,
      "step": 380
    },
    {
      "epoch": 1.85,
      "grad_norm": 3.7029120922088623,
      "learning_rate": 3.4908700322234156e-05,
      "loss": 0.1148,
      "step": 385
    },
    {
      "epoch": 1.88,
      "grad_norm": 3.4257864952087402,
      "learning_rate": 3.4640171858216975e-05,
      "loss": 0.0879,
      "step": 390
    },
    {
      "epoch": 1.9,
      "grad_norm": 6.967185020446777,
      "learning_rate": 3.437164339419979e-05,
      "loss": 0.1001,
      "step": 395
    },
    {
      "epoch": 1.93,
      "grad_norm": 1.414320707321167,
      "learning_rate": 3.41031149301826e-05,
      "loss": 0.1218,
      "step": 400
    },
    {
      "epoch": 1.95,
      "grad_norm": 1.8158583641052246,
      "learning_rate": 3.3834586466165414e-05,
      "loss": 0.0646,
      "step": 405
    },
    {
      "epoch": 1.97,
      "grad_norm": 2.169985055923462,
      "learning_rate": 3.3566058002148234e-05,
      "loss": 0.0776,
      "step": 410
    },
    {
      "epoch": 2.0,
      "grad_norm": 2.488377571105957,
      "learning_rate": 3.3297529538131046e-05,
      "loss": 0.0572,
      "step": 415
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.987136086662153,
      "eval_loss": 0.03736753389239311,
      "eval_runtime": 6.0106,
      "eval_samples_per_second": 491.469,
      "eval_steps_per_second": 15.473,
      "step": 415
    },
    {
      "epoch": 2.02,
      "grad_norm": 3.571716070175171,
      "learning_rate": 3.302900107411386e-05,
      "loss": 0.083,
      "step": 420
    },
    {
      "epoch": 2.05,
      "grad_norm": 2.3017261028289795,
      "learning_rate": 3.276047261009667e-05,
      "loss": 0.0581,
      "step": 425
    },
    {
      "epoch": 2.07,
      "grad_norm": 4.040441513061523,
      "learning_rate": 3.2491944146079485e-05,
      "loss": 0.0762,
      "step": 430
    },
    {
      "epoch": 2.09,
      "grad_norm": 1.5620166063308716,
      "learning_rate": 3.2223415682062305e-05,
      "loss": 0.0778,
      "step": 435
    },
    {
      "epoch": 2.12,
      "grad_norm": 2.1466052532196045,
      "learning_rate": 3.195488721804512e-05,
      "loss": 0.0876,
      "step": 440
    },
    {
      "epoch": 2.14,
      "grad_norm": 1.7395237684249878,
      "learning_rate": 3.168635875402793e-05,
      "loss": 0.0996,
      "step": 445
    },
    {
      "epoch": 2.17,
      "grad_norm": 5.349012851715088,
      "learning_rate": 3.1417830290010743e-05,
      "loss": 0.072,
      "step": 450
    },
    {
      "epoch": 2.19,
      "grad_norm": 4.39583683013916,
      "learning_rate": 3.1149301825993556e-05,
      "loss": 0.0997,
      "step": 455
    },
    {
      "epoch": 2.21,
      "grad_norm": 3.541804313659668,
      "learning_rate": 3.0880773361976376e-05,
      "loss": 0.0712,
      "step": 460
    },
    {
      "epoch": 2.24,
      "grad_norm": 2.4857404232025146,
      "learning_rate": 3.061224489795919e-05,
      "loss": 0.0789,
      "step": 465
    },
    {
      "epoch": 2.26,
      "grad_norm": 3.7146120071411133,
      "learning_rate": 3.0343716433942e-05,
      "loss": 0.088,
      "step": 470
    },
    {
      "epoch": 2.29,
      "grad_norm": 2.6535651683807373,
      "learning_rate": 3.007518796992481e-05,
      "loss": 0.0797,
      "step": 475
    },
    {
      "epoch": 2.31,
      "grad_norm": 1.3184159994125366,
      "learning_rate": 2.980665950590763e-05,
      "loss": 0.0846,
      "step": 480
    },
    {
      "epoch": 2.33,
      "grad_norm": 2.7791616916656494,
      "learning_rate": 2.9538131041890444e-05,
      "loss": 0.074,
      "step": 485
    },
    {
      "epoch": 2.36,
      "grad_norm": 4.284481048583984,
      "learning_rate": 2.9269602577873257e-05,
      "loss": 0.0943,
      "step": 490
    },
    {
      "epoch": 2.38,
      "grad_norm": 3.457939386367798,
      "learning_rate": 2.900107411385607e-05,
      "loss": 0.117,
      "step": 495
    },
    {
      "epoch": 2.41,
      "grad_norm": 1.2724689245224,
      "learning_rate": 2.8732545649838882e-05,
      "loss": 0.0803,
      "step": 500
    },
    {
      "epoch": 2.43,
      "grad_norm": 2.6043756008148193,
      "learning_rate": 2.8464017185821702e-05,
      "loss": 0.0672,
      "step": 505
    },
    {
      "epoch": 2.45,
      "grad_norm": 3.096151113510132,
      "learning_rate": 2.8195488721804515e-05,
      "loss": 0.0848,
      "step": 510
    },
    {
      "epoch": 2.48,
      "grad_norm": 1.409250259399414,
      "learning_rate": 2.7926960257787328e-05,
      "loss": 0.0472,
      "step": 515
    },
    {
      "epoch": 2.5,
      "grad_norm": 1.6517168283462524,
      "learning_rate": 2.765843179377014e-05,
      "loss": 0.0798,
      "step": 520
    },
    {
      "epoch": 2.53,
      "grad_norm": 2.9068679809570312,
      "learning_rate": 2.7389903329752954e-05,
      "loss": 0.0658,
      "step": 525
    },
    {
      "epoch": 2.55,
      "grad_norm": 3.5304489135742188,
      "learning_rate": 2.712137486573577e-05,
      "loss": 0.0512,
      "step": 530
    },
    {
      "epoch": 2.58,
      "grad_norm": 4.021263122558594,
      "learning_rate": 2.6852846401718583e-05,
      "loss": 0.0889,
      "step": 535
    },
    {
      "epoch": 2.6,
      "grad_norm": 1.863612413406372,
      "learning_rate": 2.6584317937701396e-05,
      "loss": 0.0671,
      "step": 540
    },
    {
      "epoch": 2.62,
      "grad_norm": 2.903055191040039,
      "learning_rate": 2.6315789473684212e-05,
      "loss": 0.071,
      "step": 545
    },
    {
      "epoch": 2.65,
      "grad_norm": 4.729198455810547,
      "learning_rate": 2.6047261009667025e-05,
      "loss": 0.0753,
      "step": 550
    },
    {
      "epoch": 2.67,
      "grad_norm": 4.479395866394043,
      "learning_rate": 2.577873254564984e-05,
      "loss": 0.088,
      "step": 555
    },
    {
      "epoch": 2.7,
      "grad_norm": 1.303819179534912,
      "learning_rate": 2.5510204081632654e-05,
      "loss": 0.0606,
      "step": 560
    },
    {
      "epoch": 2.72,
      "grad_norm": 1.7099323272705078,
      "learning_rate": 2.5241675617615467e-05,
      "loss": 0.0563,
      "step": 565
    },
    {
      "epoch": 2.74,
      "grad_norm": 6.212317943572998,
      "learning_rate": 2.4973147153598283e-05,
      "loss": 0.0948,
      "step": 570
    },
    {
      "epoch": 2.77,
      "grad_norm": 2.4912729263305664,
      "learning_rate": 2.4704618689581096e-05,
      "loss": 0.0857,
      "step": 575
    },
    {
      "epoch": 2.79,
      "grad_norm": 2.978109359741211,
      "learning_rate": 2.443609022556391e-05,
      "loss": 0.0976,
      "step": 580
    },
    {
      "epoch": 2.82,
      "grad_norm": 1.6303725242614746,
      "learning_rate": 2.4167561761546725e-05,
      "loss": 0.0731,
      "step": 585
    },
    {
      "epoch": 2.84,
      "grad_norm": 3.7856266498565674,
      "learning_rate": 2.3899033297529538e-05,
      "loss": 0.0833,
      "step": 590
    },
    {
      "epoch": 2.86,
      "grad_norm": 3.6364758014678955,
      "learning_rate": 2.3630504833512354e-05,
      "loss": 0.0659,
      "step": 595
    },
    {
      "epoch": 2.89,
      "grad_norm": 3.412216901779175,
      "learning_rate": 2.3361976369495167e-05,
      "loss": 0.1227,
      "step": 600
    },
    {
      "epoch": 2.91,
      "grad_norm": 4.180442810058594,
      "learning_rate": 2.3093447905477984e-05,
      "loss": 0.0773,
      "step": 605
    },
    {
      "epoch": 2.94,
      "grad_norm": 1.9002240896224976,
      "learning_rate": 2.2824919441460796e-05,
      "loss": 0.0656,
      "step": 610
    },
    {
      "epoch": 2.96,
      "grad_norm": 3.6314845085144043,
      "learning_rate": 2.255639097744361e-05,
      "loss": 0.0768,
      "step": 615
    },
    {
      "epoch": 2.98,
      "grad_norm": 2.4498918056488037,
      "learning_rate": 2.2287862513426426e-05,
      "loss": 0.0806,
      "step": 620
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9874746106973595,
      "eval_loss": 0.03359290212392807,
      "eval_runtime": 5.9679,
      "eval_samples_per_second": 494.983,
      "eval_steps_per_second": 15.583,
      "step": 623
    },
    {
      "epoch": 3.01,
      "grad_norm": 3.5386340618133545,
      "learning_rate": 2.201933404940924e-05,
      "loss": 0.0704,
      "step": 625
    },
    {
      "epoch": 3.03,
      "grad_norm": 2.9113972187042236,
      "learning_rate": 2.1750805585392055e-05,
      "loss": 0.0698,
      "step": 630
    },
    {
      "epoch": 3.06,
      "grad_norm": 4.226195812225342,
      "learning_rate": 2.1482277121374868e-05,
      "loss": 0.0728,
      "step": 635
    },
    {
      "epoch": 3.08,
      "grad_norm": 2.5996837615966797,
      "learning_rate": 2.121374865735768e-05,
      "loss": 0.0778,
      "step": 640
    },
    {
      "epoch": 3.1,
      "grad_norm": 3.129270076751709,
      "learning_rate": 2.0945220193340497e-05,
      "loss": 0.0803,
      "step": 645
    },
    {
      "epoch": 3.13,
      "grad_norm": 4.704379081726074,
      "learning_rate": 2.067669172932331e-05,
      "loss": 0.0605,
      "step": 650
    },
    {
      "epoch": 3.15,
      "grad_norm": 3.2946126461029053,
      "learning_rate": 2.0408163265306123e-05,
      "loss": 0.0719,
      "step": 655
    },
    {
      "epoch": 3.18,
      "grad_norm": 3.0328078269958496,
      "learning_rate": 2.0139634801288935e-05,
      "loss": 0.064,
      "step": 660
    },
    {
      "epoch": 3.2,
      "grad_norm": 3.618300199508667,
      "learning_rate": 1.987110633727175e-05,
      "loss": 0.0673,
      "step": 665
    },
    {
      "epoch": 3.23,
      "grad_norm": 3.6683876514434814,
      "learning_rate": 1.9602577873254565e-05,
      "loss": 0.0571,
      "step": 670
    },
    {
      "epoch": 3.25,
      "grad_norm": 1.6800568103790283,
      "learning_rate": 1.933404940923738e-05,
      "loss": 0.047,
      "step": 675
    },
    {
      "epoch": 3.27,
      "grad_norm": 1.3418190479278564,
      "learning_rate": 1.9065520945220194e-05,
      "loss": 0.0633,
      "step": 680
    },
    {
      "epoch": 3.3,
      "grad_norm": 2.5490853786468506,
      "learning_rate": 1.8796992481203007e-05,
      "loss": 0.0402,
      "step": 685
    },
    {
      "epoch": 3.32,
      "grad_norm": 1.2604730129241943,
      "learning_rate": 1.8528464017185823e-05,
      "loss": 0.0605,
      "step": 690
    },
    {
      "epoch": 3.35,
      "grad_norm": 4.190303325653076,
      "learning_rate": 1.8259935553168636e-05,
      "loss": 0.0886,
      "step": 695
    },
    {
      "epoch": 3.37,
      "grad_norm": 7.317651748657227,
      "learning_rate": 1.7991407089151452e-05,
      "loss": 0.0875,
      "step": 700
    },
    {
      "epoch": 3.39,
      "grad_norm": 1.9360538721084595,
      "learning_rate": 1.7722878625134265e-05,
      "loss": 0.0557,
      "step": 705
    },
    {
      "epoch": 3.42,
      "grad_norm": 4.210879802703857,
      "learning_rate": 1.7454350161117078e-05,
      "loss": 0.0735,
      "step": 710
    },
    {
      "epoch": 3.44,
      "grad_norm": 5.373249053955078,
      "learning_rate": 1.7185821697099894e-05,
      "loss": 0.0975,
      "step": 715
    },
    {
      "epoch": 3.47,
      "grad_norm": 2.7579221725463867,
      "learning_rate": 1.6917293233082707e-05,
      "loss": 0.0705,
      "step": 720
    },
    {
      "epoch": 3.49,
      "grad_norm": 2.673476457595825,
      "learning_rate": 1.6648764769065523e-05,
      "loss": 0.0524,
      "step": 725
    },
    {
      "epoch": 3.51,
      "grad_norm": 4.691239356994629,
      "learning_rate": 1.6380236305048336e-05,
      "loss": 0.0686,
      "step": 730
    },
    {
      "epoch": 3.54,
      "grad_norm": 4.022672176361084,
      "learning_rate": 1.6111707841031152e-05,
      "loss": 0.0632,
      "step": 735
    },
    {
      "epoch": 3.56,
      "grad_norm": 2.69425630569458,
      "learning_rate": 1.5843179377013965e-05,
      "loss": 0.0636,
      "step": 740
    },
    {
      "epoch": 3.59,
      "grad_norm": 4.5648884773254395,
      "learning_rate": 1.5574650912996778e-05,
      "loss": 0.0627,
      "step": 745
    },
    {
      "epoch": 3.61,
      "grad_norm": 3.1452832221984863,
      "learning_rate": 1.5306122448979594e-05,
      "loss": 0.0763,
      "step": 750
    },
    {
      "epoch": 3.63,
      "grad_norm": 5.030516147613525,
      "learning_rate": 1.5037593984962406e-05,
      "loss": 0.0581,
      "step": 755
    },
    {
      "epoch": 3.66,
      "grad_norm": 3.2779948711395264,
      "learning_rate": 1.4769065520945222e-05,
      "loss": 0.0549,
      "step": 760
    },
    {
      "epoch": 3.68,
      "grad_norm": 1.7655272483825684,
      "learning_rate": 1.4500537056928035e-05,
      "loss": 0.0778,
      "step": 765
    },
    {
      "epoch": 3.71,
      "grad_norm": 3.9161980152130127,
      "learning_rate": 1.4232008592910851e-05,
      "loss": 0.0773,
      "step": 770
    },
    {
      "epoch": 3.73,
      "grad_norm": 3.029256820678711,
      "learning_rate": 1.3963480128893664e-05,
      "loss": 0.056,
      "step": 775
    },
    {
      "epoch": 3.75,
      "grad_norm": 4.829240322113037,
      "learning_rate": 1.3694951664876477e-05,
      "loss": 0.0722,
      "step": 780
    },
    {
      "epoch": 3.78,
      "grad_norm": 2.5825934410095215,
      "learning_rate": 1.3426423200859291e-05,
      "loss": 0.0372,
      "step": 785
    },
    {
      "epoch": 3.8,
      "grad_norm": 1.706741452217102,
      "learning_rate": 1.3157894736842106e-05,
      "loss": 0.0369,
      "step": 790
    },
    {
      "epoch": 3.83,
      "grad_norm": 2.689628839492798,
      "learning_rate": 1.288936627282492e-05,
      "loss": 0.0676,
      "step": 795
    },
    {
      "epoch": 3.85,
      "grad_norm": 3.5289785861968994,
      "learning_rate": 1.2620837808807733e-05,
      "loss": 0.0802,
      "step": 800
    },
    {
      "epoch": 3.87,
      "grad_norm": 3.4799952507019043,
      "learning_rate": 1.2352309344790548e-05,
      "loss": 0.0589,
      "step": 805
    },
    {
      "epoch": 3.9,
      "grad_norm": 1.8066169023513794,
      "learning_rate": 1.2083780880773363e-05,
      "loss": 0.0662,
      "step": 810
    },
    {
      "epoch": 3.92,
      "grad_norm": 2.680537700653076,
      "learning_rate": 1.1815252416756177e-05,
      "loss": 0.0482,
      "step": 815
    },
    {
      "epoch": 3.95,
      "grad_norm": 4.0623555183410645,
      "learning_rate": 1.1546723952738992e-05,
      "loss": 0.0786,
      "step": 820
    },
    {
      "epoch": 3.97,
      "grad_norm": 2.9296209812164307,
      "learning_rate": 1.1278195488721805e-05,
      "loss": 0.0406,
      "step": 825
    },
    {
      "epoch": 4.0,
      "grad_norm": 3.4645802974700928,
      "learning_rate": 1.100966702470462e-05,
      "loss": 0.0589,
      "step": 830
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.988490182802979,
      "eval_loss": 0.03382693976163864,
      "eval_runtime": 6.0045,
      "eval_samples_per_second": 491.966,
      "eval_steps_per_second": 15.488,
      "step": 831
    },
    {
      "epoch": 4.02,
      "grad_norm": 3.1075704097747803,
      "learning_rate": 1.0741138560687434e-05,
      "loss": 0.0602,
      "step": 835
    },
    {
      "epoch": 4.04,
      "grad_norm": 0.9231489300727844,
      "learning_rate": 1.0472610096670248e-05,
      "loss": 0.0525,
      "step": 840
    },
    {
      "epoch": 4.07,
      "grad_norm": 1.0160259008407593,
      "learning_rate": 1.0204081632653061e-05,
      "loss": 0.0409,
      "step": 845
    },
    {
      "epoch": 4.09,
      "grad_norm": 1.9544317722320557,
      "learning_rate": 9.935553168635876e-06,
      "loss": 0.0504,
      "step": 850
    },
    {
      "epoch": 4.12,
      "grad_norm": 2.4267749786376953,
      "learning_rate": 9.66702470461869e-06,
      "loss": 0.059,
      "step": 855
    },
    {
      "epoch": 4.14,
      "grad_norm": 2.689138889312744,
      "learning_rate": 9.398496240601503e-06,
      "loss": 0.0625,
      "step": 860
    },
    {
      "epoch": 4.16,
      "grad_norm": 1.5462526082992554,
      "learning_rate": 9.129967776584318e-06,
      "loss": 0.0492,
      "step": 865
    },
    {
      "epoch": 4.19,
      "grad_norm": 4.044441223144531,
      "learning_rate": 8.861439312567132e-06,
      "loss": 0.05,
      "step": 870
    },
    {
      "epoch": 4.21,
      "grad_norm": 5.436426162719727,
      "learning_rate": 8.592910848549947e-06,
      "loss": 0.0455,
      "step": 875
    },
    {
      "epoch": 4.24,
      "grad_norm": 2.8710386753082275,
      "learning_rate": 8.324382384532762e-06,
      "loss": 0.0645,
      "step": 880
    },
    {
      "epoch": 4.26,
      "grad_norm": 3.6887547969818115,
      "learning_rate": 8.055853920515576e-06,
      "loss": 0.0623,
      "step": 885
    },
    {
      "epoch": 4.28,
      "grad_norm": 3.6889421939849854,
      "learning_rate": 7.787325456498389e-06,
      "loss": 0.0569,
      "step": 890
    },
    {
      "epoch": 4.31,
      "grad_norm": 3.734835624694824,
      "learning_rate": 7.518796992481203e-06,
      "loss": 0.0677,
      "step": 895
    },
    {
      "epoch": 4.33,
      "grad_norm": 2.4793076515197754,
      "learning_rate": 7.250268528464017e-06,
      "loss": 0.0445,
      "step": 900
    },
    {
      "epoch": 4.36,
      "grad_norm": 5.3167500495910645,
      "learning_rate": 6.981740064446832e-06,
      "loss": 0.0857,
      "step": 905
    },
    {
      "epoch": 4.38,
      "grad_norm": 2.9194352626800537,
      "learning_rate": 6.713211600429646e-06,
      "loss": 0.0704,
      "step": 910
    },
    {
      "epoch": 4.4,
      "grad_norm": 2.78092885017395,
      "learning_rate": 6.44468313641246e-06,
      "loss": 0.062,
      "step": 915
    },
    {
      "epoch": 4.43,
      "grad_norm": 2.4215569496154785,
      "learning_rate": 6.176154672395274e-06,
      "loss": 0.063,
      "step": 920
    },
    {
      "epoch": 4.45,
      "grad_norm": 2.3797125816345215,
      "learning_rate": 5.907626208378089e-06,
      "loss": 0.052,
      "step": 925
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.7256618142127991,
      "learning_rate": 5.639097744360902e-06,
      "loss": 0.0468,
      "step": 930
    },
    {
      "epoch": 4.5,
      "grad_norm": 1.283785104751587,
      "learning_rate": 5.370569280343717e-06,
      "loss": 0.0577,
      "step": 935
    },
    {
      "epoch": 4.52,
      "grad_norm": 4.501251220703125,
      "learning_rate": 5.102040816326531e-06,
      "loss": 0.0595,
      "step": 940
    },
    {
      "epoch": 4.55,
      "grad_norm": 2.7582945823669434,
      "learning_rate": 4.833512352309345e-06,
      "loss": 0.0693,
      "step": 945
    },
    {
      "epoch": 4.57,
      "grad_norm": 5.989513397216797,
      "learning_rate": 4.564983888292159e-06,
      "loss": 0.0746,
      "step": 950
    },
    {
      "epoch": 4.6,
      "grad_norm": 3.373969554901123,
      "learning_rate": 4.2964554242749735e-06,
      "loss": 0.0529,
      "step": 955
    },
    {
      "epoch": 4.62,
      "grad_norm": 2.1218202114105225,
      "learning_rate": 4.027926960257788e-06,
      "loss": 0.0368,
      "step": 960
    },
    {
      "epoch": 4.65,
      "grad_norm": 1.9887917041778564,
      "learning_rate": 3.7593984962406014e-06,
      "loss": 0.0354,
      "step": 965
    },
    {
      "epoch": 4.67,
      "grad_norm": 1.6485974788665771,
      "learning_rate": 3.490870032223416e-06,
      "loss": 0.0653,
      "step": 970
    },
    {
      "epoch": 4.69,
      "grad_norm": 2.1341357231140137,
      "learning_rate": 3.22234156820623e-06,
      "loss": 0.0421,
      "step": 975
    },
    {
      "epoch": 4.72,
      "grad_norm": 5.573878765106201,
      "learning_rate": 2.9538131041890443e-06,
      "loss": 0.0741,
      "step": 980
    },
    {
      "epoch": 4.74,
      "grad_norm": 3.1527225971221924,
      "learning_rate": 2.6852846401718585e-06,
      "loss": 0.0327,
      "step": 985
    },
    {
      "epoch": 4.77,
      "grad_norm": 3.773310661315918,
      "learning_rate": 2.4167561761546726e-06,
      "loss": 0.0472,
      "step": 990
    },
    {
      "epoch": 4.79,
      "grad_norm": 3.0205063819885254,
      "learning_rate": 2.1482277121374868e-06,
      "loss": 0.0723,
      "step": 995
    },
    {
      "epoch": 4.81,
      "grad_norm": 3.098895788192749,
      "learning_rate": 1.8796992481203007e-06,
      "loss": 0.0743,
      "step": 1000
    },
    {
      "epoch": 4.84,
      "grad_norm": 1.734661340713501,
      "learning_rate": 1.611170784103115e-06,
      "loss": 0.0337,
      "step": 1005
    },
    {
      "epoch": 4.86,
      "grad_norm": 1.7586333751678467,
      "learning_rate": 1.3426423200859292e-06,
      "loss": 0.062,
      "step": 1010
    },
    {
      "epoch": 4.89,
      "grad_norm": 2.720187187194824,
      "learning_rate": 1.0741138560687434e-06,
      "loss": 0.0791,
      "step": 1015
    },
    {
      "epoch": 4.91,
      "grad_norm": 1.8995425701141357,
      "learning_rate": 8.055853920515575e-07,
      "loss": 0.061,
      "step": 1020
    },
    {
      "epoch": 4.93,
      "grad_norm": 4.119653224945068,
      "learning_rate": 5.370569280343717e-07,
      "loss": 0.0486,
      "step": 1025
    },
    {
      "epoch": 4.96,
      "grad_norm": 3.3452024459838867,
      "learning_rate": 2.6852846401718585e-07,
      "loss": 0.0572,
      "step": 1030
    },
    {
      "epoch": 4.98,
      "grad_norm": 1.4279868602752686,
      "learning_rate": 0.0,
      "loss": 0.0326,
      "step": 1035
    },
    {
      "epoch": 4.98,
      "eval_accuracy": 0.989844278943805,
      "eval_loss": 0.029886629432439804,
      "eval_runtime": 5.9932,
      "eval_samples_per_second": 492.888,
      "eval_steps_per_second": 15.517,
      "step": 1035
    },
    {
      "epoch": 4.98,
      "step": 1035,
      "total_flos": 3.2925088680822374e+18,
      "train_loss": 0.12655677452755434,
      "train_runtime": 660.3476,
      "train_samples_per_second": 201.303,
      "train_steps_per_second": 1.567
    }
  ],
  "logging_steps": 5,
  "max_steps": 1035,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 3.2925088680822374e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}