{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.6160310277957337,
  "eval_steps": 1000,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00808015513897867,
      "grad_norm": 3.75984263420105,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.8538,
      "step": 25
    },
    {
      "epoch": 0.01616031027795734,
      "grad_norm": 3.785407066345215,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.7059,
      "step": 50
    },
    {
      "epoch": 0.024240465416936006,
      "grad_norm": 2.6398749351501465,
      "learning_rate": 1.5e-06,
      "loss": 0.5499,
      "step": 75
    },
    {
      "epoch": 0.03232062055591468,
      "grad_norm": 3.0633111000061035,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.4899,
      "step": 100
    },
    {
      "epoch": 0.040400775694893344,
      "grad_norm": 2.5752687454223633,
      "learning_rate": 2.5e-06,
      "loss": 0.4542,
      "step": 125
    },
    {
      "epoch": 0.04848093083387201,
      "grad_norm": 2.7746145725250244,
      "learning_rate": 3e-06,
      "loss": 0.4448,
      "step": 150
    },
    {
      "epoch": 0.05656108597285068,
      "grad_norm": 2.5188117027282715,
      "learning_rate": 3.5e-06,
      "loss": 0.4047,
      "step": 175
    },
    {
      "epoch": 0.06464124111182935,
      "grad_norm": 2.549173593521118,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.3918,
      "step": 200
    },
    {
      "epoch": 0.07272139625080802,
      "grad_norm": 2.651440382003784,
      "learning_rate": 4.5e-06,
      "loss": 0.3738,
      "step": 225
    },
    {
      "epoch": 0.08080155138978669,
      "grad_norm": 2.6359574794769287,
      "learning_rate": 5e-06,
      "loss": 0.3923,
      "step": 250
    },
    {
      "epoch": 0.08888170652876536,
      "grad_norm": 3.015693426132202,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.3597,
      "step": 275
    },
    {
      "epoch": 0.09696186166774402,
      "grad_norm": 2.4634430408477783,
      "learning_rate": 6e-06,
      "loss": 0.3577,
      "step": 300
    },
    {
      "epoch": 0.10504201680672269,
      "grad_norm": 2.6268017292022705,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.3397,
      "step": 325
    },
    {
      "epoch": 0.11312217194570136,
      "grad_norm": 2.1071865558624268,
      "learning_rate": 7e-06,
      "loss": 0.344,
      "step": 350
    },
    {
      "epoch": 0.12120232708468003,
      "grad_norm": 2.0719916820526123,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.3232,
      "step": 375
    },
    {
      "epoch": 0.1292824822236587,
      "grad_norm": 2.445434331893921,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.3488,
      "step": 400
    },
    {
      "epoch": 0.13736263736263737,
      "grad_norm": 2.1177170276641846,
      "learning_rate": 8.5e-06,
      "loss": 0.327,
      "step": 425
    },
    {
      "epoch": 0.14544279250161604,
      "grad_norm": 2.2949392795562744,
      "learning_rate": 9e-06,
      "loss": 0.3228,
      "step": 450
    },
    {
      "epoch": 0.1535229476405947,
      "grad_norm": 1.701798439025879,
      "learning_rate": 9.5e-06,
      "loss": 0.2907,
      "step": 475
    },
    {
      "epoch": 0.16160310277957338,
      "grad_norm": 1.857657790184021,
      "learning_rate": 1e-05,
      "loss": 0.3021,
      "step": 500
    },
    {
      "epoch": 0.16968325791855204,
      "grad_norm": 1.8236263990402222,
      "learning_rate": 9.944444444444445e-06,
      "loss": 0.2878,
      "step": 525
    },
    {
      "epoch": 0.1777634130575307,
      "grad_norm": 2.30806303024292,
      "learning_rate": 9.88888888888889e-06,
      "loss": 0.2741,
      "step": 550
    },
    {
      "epoch": 0.18584356819650938,
      "grad_norm": 2.0603034496307373,
      "learning_rate": 9.833333333333333e-06,
      "loss": 0.2953,
      "step": 575
    },
    {
      "epoch": 0.19392372333548805,
      "grad_norm": 2.1043479442596436,
      "learning_rate": 9.777777777777779e-06,
      "loss": 0.2855,
      "step": 600
    },
    {
      "epoch": 0.2020038784744667,
      "grad_norm": 1.921908974647522,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.2709,
      "step": 625
    },
    {
      "epoch": 0.21008403361344538,
      "grad_norm": 2.1535732746124268,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.251,
      "step": 650
    },
    {
      "epoch": 0.21816418875242405,
      "grad_norm": 2.01731014251709,
      "learning_rate": 9.611111111111112e-06,
      "loss": 0.2817,
      "step": 675
    },
    {
      "epoch": 0.22624434389140272,
      "grad_norm": 1.9176462888717651,
      "learning_rate": 9.555555555555556e-06,
      "loss": 0.2608,
      "step": 700
    },
    {
      "epoch": 0.23432449903038138,
      "grad_norm": 2.2685463428497314,
      "learning_rate": 9.5e-06,
      "loss": 0.2584,
      "step": 725
    },
    {
      "epoch": 0.24240465416936005,
      "grad_norm": 1.6904027462005615,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.2529,
      "step": 750
    },
    {
      "epoch": 0.2504848093083387,
      "grad_norm": 2.014220714569092,
      "learning_rate": 9.38888888888889e-06,
      "loss": 0.2443,
      "step": 775
    },
    {
      "epoch": 0.2585649644473174,
      "grad_norm": 2.1156768798828125,
      "learning_rate": 9.333333333333334e-06,
      "loss": 0.2548,
      "step": 800
    },
    {
      "epoch": 0.26664511958629605,
      "grad_norm": 2.0581390857696533,
      "learning_rate": 9.277777777777778e-06,
      "loss": 0.2391,
      "step": 825
    },
    {
      "epoch": 0.27472527472527475,
      "grad_norm": 1.9932626485824585,
      "learning_rate": 9.222222222222224e-06,
      "loss": 0.2326,
      "step": 850
    },
    {
      "epoch": 0.2828054298642534,
      "grad_norm": 1.7348275184631348,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.2399,
      "step": 875
    },
    {
      "epoch": 0.2908855850032321,
      "grad_norm": 1.7778377532958984,
      "learning_rate": 9.111111111111112e-06,
      "loss": 0.2272,
      "step": 900
    },
    {
      "epoch": 0.2989657401422107,
      "grad_norm": 1.917076826095581,
      "learning_rate": 9.055555555555556e-06,
      "loss": 0.2498,
      "step": 925
    },
    {
      "epoch": 0.3070458952811894,
      "grad_norm": 1.727513313293457,
      "learning_rate": 9e-06,
      "loss": 0.226,
      "step": 950
    },
    {
      "epoch": 0.31512605042016806,
      "grad_norm": 2.011462688446045,
      "learning_rate": 8.944444444444446e-06,
      "loss": 0.2358,
      "step": 975
    },
    {
      "epoch": 0.32320620555914675,
      "grad_norm": 2.0421934127807617,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.2469,
      "step": 1000
    },
    {
      "epoch": 0.32320620555914675,
      "eval_loss": 0.23132415115833282,
      "eval_runtime": 421.4559,
      "eval_samples_per_second": 2.373,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.17325433135828397,
      "step": 1000
    },
    {
      "epoch": 0.3312863606981254,
      "grad_norm": 2.1990480422973633,
      "learning_rate": 8.833333333333334e-06,
      "loss": 0.2397,
      "step": 1025
    },
    {
      "epoch": 0.3393665158371041,
      "grad_norm": 1.6808555126190186,
      "learning_rate": 8.777777777777778e-06,
      "loss": 0.212,
      "step": 1050
    },
    {
      "epoch": 0.3474466709760827,
      "grad_norm": 1.9749352931976318,
      "learning_rate": 8.722222222222224e-06,
      "loss": 0.2148,
      "step": 1075
    },
    {
      "epoch": 0.3555268261150614,
      "grad_norm": 2.0048928260803223,
      "learning_rate": 8.666666666666668e-06,
      "loss": 0.2161,
      "step": 1100
    },
    {
      "epoch": 0.36360698125404006,
      "grad_norm": 1.8271081447601318,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.237,
      "step": 1125
    },
    {
      "epoch": 0.37168713639301876,
      "grad_norm": 1.751715898513794,
      "learning_rate": 8.555555555555556e-06,
      "loss": 0.2088,
      "step": 1150
    },
    {
      "epoch": 0.3797672915319974,
      "grad_norm": 1.490201473236084,
      "learning_rate": 8.5e-06,
      "loss": 0.2207,
      "step": 1175
    },
    {
      "epoch": 0.3878474466709761,
      "grad_norm": 2.108602285385132,
      "learning_rate": 8.444444444444446e-06,
      "loss": 0.2177,
      "step": 1200
    },
    {
      "epoch": 0.39592760180995473,
      "grad_norm": 1.637518048286438,
      "learning_rate": 8.38888888888889e-06,
      "loss": 0.2134,
      "step": 1225
    },
    {
      "epoch": 0.4040077569489334,
      "grad_norm": 1.8813297748565674,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.2048,
      "step": 1250
    },
    {
      "epoch": 0.41208791208791207,
      "grad_norm": 1.61934494972229,
      "learning_rate": 8.277777777777778e-06,
      "loss": 0.1951,
      "step": 1275
    },
    {
      "epoch": 0.42016806722689076,
      "grad_norm": 1.9593381881713867,
      "learning_rate": 8.222222222222222e-06,
      "loss": 0.1989,
      "step": 1300
    },
    {
      "epoch": 0.4282482223658694,
      "grad_norm": 1.7399969100952148,
      "learning_rate": 8.166666666666668e-06,
      "loss": 0.1837,
      "step": 1325
    },
    {
      "epoch": 0.4363283775048481,
      "grad_norm": 1.9102469682693481,
      "learning_rate": 8.111111111111112e-06,
      "loss": 0.19,
      "step": 1350
    },
    {
      "epoch": 0.44440853264382674,
      "grad_norm": 1.8125574588775635,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.2015,
      "step": 1375
    },
    {
      "epoch": 0.45248868778280543,
      "grad_norm": 1.8675082921981812,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.2076,
      "step": 1400
    },
    {
      "epoch": 0.46056884292178407,
      "grad_norm": 2.056074619293213,
      "learning_rate": 7.944444444444445e-06,
      "loss": 0.2054,
      "step": 1425
    },
    {
      "epoch": 0.46864899806076277,
      "grad_norm": 1.4601351022720337,
      "learning_rate": 7.88888888888889e-06,
      "loss": 0.2057,
      "step": 1450
    },
    {
      "epoch": 0.47672915319974146,
      "grad_norm": 1.6205873489379883,
      "learning_rate": 7.833333333333333e-06,
      "loss": 0.1887,
      "step": 1475
    },
    {
      "epoch": 0.4848093083387201,
      "grad_norm": 1.356102705001831,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.1767,
      "step": 1500
    },
    {
      "epoch": 0.4928894634776988,
      "grad_norm": 2.1016225814819336,
      "learning_rate": 7.722222222222223e-06,
      "loss": 0.1859,
      "step": 1525
    },
    {
      "epoch": 0.5009696186166774,
      "grad_norm": 1.3985319137573242,
      "learning_rate": 7.666666666666667e-06,
      "loss": 0.1981,
      "step": 1550
    },
    {
      "epoch": 0.5090497737556561,
      "grad_norm": 1.8482751846313477,
      "learning_rate": 7.611111111111111e-06,
      "loss": 0.1979,
      "step": 1575
    },
    {
      "epoch": 0.5171299288946348,
      "grad_norm": 1.6422449350357056,
      "learning_rate": 7.555555555555556e-06,
      "loss": 0.1858,
      "step": 1600
    },
    {
      "epoch": 0.5252100840336135,
      "grad_norm": 1.5610370635986328,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.1988,
      "step": 1625
    },
    {
      "epoch": 0.5332902391725921,
      "grad_norm": 1.5282772779464722,
      "learning_rate": 7.444444444444445e-06,
      "loss": 0.1904,
      "step": 1650
    },
    {
      "epoch": 0.5413703943115707,
      "grad_norm": 1.5836628675460815,
      "learning_rate": 7.38888888888889e-06,
      "loss": 0.1913,
      "step": 1675
    },
    {
      "epoch": 0.5494505494505495,
      "grad_norm": 1.7465559244155884,
      "learning_rate": 7.333333333333333e-06,
      "loss": 0.1638,
      "step": 1700
    },
    {
      "epoch": 0.5575307045895281,
      "grad_norm": 1.7590513229370117,
      "learning_rate": 7.277777777777778e-06,
      "loss": 0.1946,
      "step": 1725
    },
    {
      "epoch": 0.5656108597285068,
      "grad_norm": 1.7977447509765625,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.1919,
      "step": 1750
    },
    {
      "epoch": 0.5736910148674854,
      "grad_norm": 1.5070719718933105,
      "learning_rate": 7.166666666666667e-06,
      "loss": 0.1801,
      "step": 1775
    },
    {
      "epoch": 0.5817711700064642,
      "grad_norm": 1.7022117376327515,
      "learning_rate": 7.111111111111112e-06,
      "loss": 0.1831,
      "step": 1800
    },
    {
      "epoch": 0.5898513251454428,
      "grad_norm": 1.6493513584136963,
      "learning_rate": 7.055555555555557e-06,
      "loss": 0.1889,
      "step": 1825
    },
    {
      "epoch": 0.5979314802844214,
      "grad_norm": 1.4757349491119385,
      "learning_rate": 7e-06,
      "loss": 0.1834,
      "step": 1850
    },
    {
      "epoch": 0.6060116354234001,
      "grad_norm": 1.7376744747161865,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.1676,
      "step": 1875
    },
    {
      "epoch": 0.6140917905623788,
      "grad_norm": 1.9608112573623657,
      "learning_rate": 6.88888888888889e-06,
      "loss": 0.1884,
      "step": 1900
    },
    {
      "epoch": 0.6221719457013575,
      "grad_norm": 1.412118673324585,
      "learning_rate": 6.833333333333334e-06,
      "loss": 0.1949,
      "step": 1925
    },
    {
      "epoch": 0.6302521008403361,
      "grad_norm": 2.119964361190796,
      "learning_rate": 6.777777777777779e-06,
      "loss": 0.193,
      "step": 1950
    },
    {
      "epoch": 0.6383322559793148,
      "grad_norm": 1.9113435745239258,
      "learning_rate": 6.7222222222222235e-06,
      "loss": 0.1662,
      "step": 1975
    },
    {
      "epoch": 0.6464124111182935,
      "grad_norm": 1.3223708868026733,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.1578,
      "step": 2000
    },
    {
      "epoch": 0.6464124111182935,
      "eval_loss": 0.1784905195236206,
      "eval_runtime": 427.4993,
      "eval_samples_per_second": 2.339,
      "eval_steps_per_second": 0.147,
      "eval_wer": 0.13732843321083027,
      "step": 2000
    },
    {
      "epoch": 0.6544925662572721,
      "grad_norm": 1.5342998504638672,
      "learning_rate": 6.6111111111111115e-06,
      "loss": 0.1727,
      "step": 2025
    },
    {
      "epoch": 0.6625727213962508,
      "grad_norm": 1.322567105293274,
      "learning_rate": 6.555555555555556e-06,
      "loss": 0.1858,
      "step": 2050
    },
    {
      "epoch": 0.6706528765352294,
      "grad_norm": 1.5235811471939087,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.1523,
      "step": 2075
    },
    {
      "epoch": 0.6787330316742082,
      "grad_norm": 1.478602647781372,
      "learning_rate": 6.444444444444445e-06,
      "loss": 0.1667,
      "step": 2100
    },
    {
      "epoch": 0.6868131868131868,
      "grad_norm": 1.6120604276657104,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.163,
      "step": 2125
    },
    {
      "epoch": 0.6948933419521655,
      "grad_norm": 1.3464877605438232,
      "learning_rate": 6.333333333333333e-06,
      "loss": 0.1624,
      "step": 2150
    },
    {
      "epoch": 0.7029734970911441,
      "grad_norm": 1.2172194719314575,
      "learning_rate": 6.277777777777778e-06,
      "loss": 0.1483,
      "step": 2175
    },
    {
      "epoch": 0.7110536522301228,
      "grad_norm": 1.5536859035491943,
      "learning_rate": 6.222222222222223e-06,
      "loss": 0.1552,
      "step": 2200
    },
    {
      "epoch": 0.7191338073691015,
      "grad_norm": 1.394464373588562,
      "learning_rate": 6.166666666666667e-06,
      "loss": 0.1536,
      "step": 2225
    },
    {
      "epoch": 0.7272139625080801,
      "grad_norm": 1.8853507041931152,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.1673,
      "step": 2250
    },
    {
      "epoch": 0.7352941176470589,
      "grad_norm": 1.529552936553955,
      "learning_rate": 6.055555555555555e-06,
      "loss": 0.1622,
      "step": 2275
    },
    {
      "epoch": 0.7433742727860375,
      "grad_norm": 1.541446566581726,
      "learning_rate": 6e-06,
      "loss": 0.1647,
      "step": 2300
    },
    {
      "epoch": 0.7514544279250162,
      "grad_norm": 1.6299506425857544,
      "learning_rate": 5.944444444444445e-06,
      "loss": 0.172,
      "step": 2325
    },
    {
      "epoch": 0.7595345830639948,
      "grad_norm": 1.5410858392715454,
      "learning_rate": 5.88888888888889e-06,
      "loss": 0.1558,
      "step": 2350
    },
    {
      "epoch": 0.7676147382029735,
      "grad_norm": 1.5796810388565063,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.158,
      "step": 2375
    },
    {
      "epoch": 0.7756948933419522,
      "grad_norm": 1.5637125968933105,
      "learning_rate": 5.777777777777778e-06,
      "loss": 0.1583,
      "step": 2400
    },
    {
      "epoch": 0.7837750484809308,
      "grad_norm": 1.4852577447891235,
      "learning_rate": 5.722222222222222e-06,
      "loss": 0.1618,
      "step": 2425
    },
    {
      "epoch": 0.7918552036199095,
      "grad_norm": 1.6491366624832153,
      "learning_rate": 5.666666666666667e-06,
      "loss": 0.16,
      "step": 2450
    },
    {
      "epoch": 0.7999353587588882,
      "grad_norm": 1.7685949802398682,
      "learning_rate": 5.611111111111112e-06,
      "loss": 0.1706,
      "step": 2475
    },
    {
      "epoch": 0.8080155138978669,
      "grad_norm": 1.4126862287521362,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.1525,
      "step": 2500
    },
    {
      "epoch": 0.8160956690368455,
      "grad_norm": 1.4942584037780762,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.1628,
      "step": 2525
    },
    {
      "epoch": 0.8241758241758241,
      "grad_norm": 1.6680798530578613,
      "learning_rate": 5.444444444444445e-06,
      "loss": 0.1647,
      "step": 2550
    },
    {
      "epoch": 0.8322559793148029,
      "grad_norm": 1.1703355312347412,
      "learning_rate": 5.388888888888889e-06,
      "loss": 0.1519,
      "step": 2575
    },
    {
      "epoch": 0.8403361344537815,
      "grad_norm": 1.4046279191970825,
      "learning_rate": 5.333333333333334e-06,
      "loss": 0.1492,
      "step": 2600
    },
    {
      "epoch": 0.8484162895927602,
      "grad_norm": 1.4554412364959717,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.1452,
      "step": 2625
    },
    {
      "epoch": 0.8564964447317388,
      "grad_norm": 1.737576961517334,
      "learning_rate": 5.2222222222222226e-06,
      "loss": 0.1629,
      "step": 2650
    },
    {
      "epoch": 0.8645765998707176,
      "grad_norm": 1.261842966079712,
      "learning_rate": 5.1666666666666675e-06,
      "loss": 0.1539,
      "step": 2675
    },
    {
      "epoch": 0.8726567550096962,
      "grad_norm": 1.5773130655288696,
      "learning_rate": 5.1111111111111115e-06,
      "loss": 0.1434,
      "step": 2700
    },
    {
      "epoch": 0.8807369101486748,
      "grad_norm": 1.619884967803955,
      "learning_rate": 5.0555555555555555e-06,
      "loss": 0.1753,
      "step": 2725
    },
    {
      "epoch": 0.8888170652876535,
      "grad_norm": 1.175441861152649,
      "learning_rate": 5e-06,
      "loss": 0.1594,
      "step": 2750
    },
    {
      "epoch": 0.8968972204266322,
      "grad_norm": 1.8705310821533203,
      "learning_rate": 4.944444444444445e-06,
      "loss": 0.1618,
      "step": 2775
    },
    {
      "epoch": 0.9049773755656109,
      "grad_norm": 1.5610700845718384,
      "learning_rate": 4.888888888888889e-06,
      "loss": 0.1537,
      "step": 2800
    },
    {
      "epoch": 0.9130575307045895,
      "grad_norm": 1.940385103225708,
      "learning_rate": 4.833333333333333e-06,
      "loss": 0.1602,
      "step": 2825
    },
    {
      "epoch": 0.9211376858435681,
      "grad_norm": 1.4349099397659302,
      "learning_rate": 4.777777777777778e-06,
      "loss": 0.1492,
      "step": 2850
    },
    {
      "epoch": 0.9292178409825469,
      "grad_norm": 1.7276194095611572,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.1418,
      "step": 2875
    },
    {
      "epoch": 0.9372979961215255,
      "grad_norm": 1.138739824295044,
      "learning_rate": 4.666666666666667e-06,
      "loss": 0.1566,
      "step": 2900
    },
    {
      "epoch": 0.9453781512605042,
      "grad_norm": 1.1318280696868896,
      "learning_rate": 4.611111111111112e-06,
      "loss": 0.1489,
      "step": 2925
    },
    {
      "epoch": 0.9534583063994829,
      "grad_norm": 1.766103744506836,
      "learning_rate": 4.555555555555556e-06,
      "loss": 0.1567,
      "step": 2950
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 1.9743990898132324,
      "learning_rate": 4.5e-06,
      "loss": 0.1524,
      "step": 2975
    },
    {
      "epoch": 0.9696186166774402,
      "grad_norm": 1.7132006883621216,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.1491,
      "step": 3000
    },
    {
      "epoch": 0.9696186166774402,
      "eval_loss": 0.15305228531360626,
      "eval_runtime": 421.6838,
      "eval_samples_per_second": 2.371,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.12127803195079877,
      "step": 3000
    },
    {
      "epoch": 0.9776987718164188,
      "grad_norm": 1.675523042678833,
      "learning_rate": 4.388888888888889e-06,
      "loss": 0.1359,
      "step": 3025
    },
    {
      "epoch": 0.9857789269553976,
      "grad_norm": 1.1501892805099487,
      "learning_rate": 4.333333333333334e-06,
      "loss": 0.1527,
      "step": 3050
    },
    {
      "epoch": 0.9938590820943762,
      "grad_norm": 1.8712406158447266,
      "learning_rate": 4.277777777777778e-06,
      "loss": 0.1572,
      "step": 3075
    },
    {
      "epoch": 1.0019392372333549,
      "grad_norm": 1.688395619392395,
      "learning_rate": 4.222222222222223e-06,
      "loss": 0.143,
      "step": 3100
    },
    {
      "epoch": 1.0100193923723335,
      "grad_norm": 1.719205379486084,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.1135,
      "step": 3125
    },
    {
      "epoch": 1.0180995475113122,
      "grad_norm": 1.399117112159729,
      "learning_rate": 4.111111111111111e-06,
      "loss": 0.1138,
      "step": 3150
    },
    {
      "epoch": 1.0261797026502908,
      "grad_norm": 1.3488880395889282,
      "learning_rate": 4.055555555555556e-06,
      "loss": 0.097,
      "step": 3175
    },
    {
      "epoch": 1.0342598577892697,
      "grad_norm": 1.3759610652923584,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.1073,
      "step": 3200
    },
    {
      "epoch": 1.0423400129282483,
      "grad_norm": 1.2929582595825195,
      "learning_rate": 3.944444444444445e-06,
      "loss": 0.1067,
      "step": 3225
    },
    {
      "epoch": 1.050420168067227,
      "grad_norm": 1.2151849269866943,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.1023,
      "step": 3250
    },
    {
      "epoch": 1.0585003232062056,
      "grad_norm": 1.4267935752868652,
      "learning_rate": 3.833333333333334e-06,
      "loss": 0.1098,
      "step": 3275
    },
    {
      "epoch": 1.0665804783451842,
      "grad_norm": 1.4334278106689453,
      "learning_rate": 3.777777777777778e-06,
      "loss": 0.1001,
      "step": 3300
    },
    {
      "epoch": 1.0746606334841629,
      "grad_norm": 1.3168810606002808,
      "learning_rate": 3.7222222222222225e-06,
      "loss": 0.1025,
      "step": 3325
    },
    {
      "epoch": 1.0827407886231415,
      "grad_norm": 1.4960328340530396,
      "learning_rate": 3.6666666666666666e-06,
      "loss": 0.109,
      "step": 3350
    },
    {
      "epoch": 1.0908209437621201,
      "grad_norm": 1.430653691291809,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.0938,
      "step": 3375
    },
    {
      "epoch": 1.098901098901099,
      "grad_norm": 1.261281132698059,
      "learning_rate": 3.555555555555556e-06,
      "loss": 0.0978,
      "step": 3400
    },
    {
      "epoch": 1.1069812540400776,
      "grad_norm": 1.2067958116531372,
      "learning_rate": 3.5e-06,
      "loss": 0.1008,
      "step": 3425
    },
    {
      "epoch": 1.1150614091790563,
      "grad_norm": 1.456865906715393,
      "learning_rate": 3.444444444444445e-06,
      "loss": 0.1122,
      "step": 3450
    },
    {
      "epoch": 1.123141564318035,
      "grad_norm": 1.0266293287277222,
      "learning_rate": 3.3888888888888893e-06,
      "loss": 0.0972,
      "step": 3475
    },
    {
      "epoch": 1.1312217194570136,
      "grad_norm": 1.4198247194290161,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.11,
      "step": 3500
    },
    {
      "epoch": 1.1393018745959922,
      "grad_norm": 1.3642793893814087,
      "learning_rate": 3.277777777777778e-06,
      "loss": 0.1031,
      "step": 3525
    },
    {
      "epoch": 1.1473820297349708,
      "grad_norm": 1.474433422088623,
      "learning_rate": 3.2222222222222227e-06,
      "loss": 0.0999,
      "step": 3550
    },
    {
      "epoch": 1.1554621848739495,
      "grad_norm": 1.726651668548584,
      "learning_rate": 3.1666666666666667e-06,
      "loss": 0.1052,
      "step": 3575
    },
    {
      "epoch": 1.1635423400129283,
      "grad_norm": 1.0963325500488281,
      "learning_rate": 3.1111111111111116e-06,
      "loss": 0.0889,
      "step": 3600
    },
    {
      "epoch": 1.171622495151907,
      "grad_norm": 1.0337573289871216,
      "learning_rate": 3.055555555555556e-06,
      "loss": 0.1078,
      "step": 3625
    },
    {
      "epoch": 1.1797026502908856,
      "grad_norm": 1.2517417669296265,
      "learning_rate": 3e-06,
      "loss": 0.0983,
      "step": 3650
    },
    {
      "epoch": 1.1877828054298643,
      "grad_norm": 1.1968861818313599,
      "learning_rate": 2.944444444444445e-06,
      "loss": 0.0981,
      "step": 3675
    },
    {
      "epoch": 1.195862960568843,
      "grad_norm": 1.646301031112671,
      "learning_rate": 2.888888888888889e-06,
      "loss": 0.0982,
      "step": 3700
    },
    {
      "epoch": 1.2039431157078215,
      "grad_norm": 1.5741595029830933,
      "learning_rate": 2.8333333333333335e-06,
      "loss": 0.0954,
      "step": 3725
    },
    {
      "epoch": 1.2120232708468002,
      "grad_norm": 0.865523099899292,
      "learning_rate": 2.7777777777777783e-06,
      "loss": 0.0936,
      "step": 3750
    },
    {
      "epoch": 1.220103425985779,
      "grad_norm": 1.109647512435913,
      "learning_rate": 2.7222222222222224e-06,
      "loss": 0.1092,
      "step": 3775
    },
    {
      "epoch": 1.2281835811247577,
      "grad_norm": 1.2372595071792603,
      "learning_rate": 2.666666666666667e-06,
      "loss": 0.1021,
      "step": 3800
    },
    {
      "epoch": 1.2362637362637363,
      "grad_norm": 1.5520275831222534,
      "learning_rate": 2.6111111111111113e-06,
      "loss": 0.0972,
      "step": 3825
    },
    {
      "epoch": 1.244343891402715,
      "grad_norm": 1.1914920806884766,
      "learning_rate": 2.5555555555555557e-06,
      "loss": 0.0945,
      "step": 3850
    },
    {
      "epoch": 1.2524240465416936,
      "grad_norm": 1.269389033317566,
      "learning_rate": 2.5e-06,
      "loss": 0.0949,
      "step": 3875
    },
    {
      "epoch": 1.2605042016806722,
      "grad_norm": 1.683188557624817,
      "learning_rate": 2.4444444444444447e-06,
      "loss": 0.0923,
      "step": 3900
    },
    {
      "epoch": 1.2685843568196509,
      "grad_norm": 1.4076889753341675,
      "learning_rate": 2.388888888888889e-06,
      "loss": 0.0992,
      "step": 3925
    },
    {
      "epoch": 1.2766645119586295,
      "grad_norm": 1.0736989974975586,
      "learning_rate": 2.3333333333333336e-06,
      "loss": 0.0927,
      "step": 3950
    },
    {
      "epoch": 1.2847446670976082,
      "grad_norm": 0.8500351905822754,
      "learning_rate": 2.277777777777778e-06,
      "loss": 0.092,
      "step": 3975
    },
    {
      "epoch": 1.292824822236587,
      "grad_norm": 1.2514774799346924,
      "learning_rate": 2.222222222222222e-06,
      "loss": 0.099,
      "step": 4000
    },
    {
      "epoch": 1.292824822236587,
      "eval_loss": 0.1433822065591812,
      "eval_runtime": 422.4096,
      "eval_samples_per_second": 2.367,
      "eval_steps_per_second": 0.149,
      "eval_wer": 0.11287782194554864,
      "step": 4000
    },
    {
      "epoch": 1.3009049773755657,
      "grad_norm": 1.5008472204208374,
      "learning_rate": 2.166666666666667e-06,
      "loss": 0.0973,
      "step": 4025
    },
    {
      "epoch": 1.3089851325145443,
      "grad_norm": 1.1695759296417236,
      "learning_rate": 2.1111111111111114e-06,
      "loss": 0.0903,
      "step": 4050
    },
    {
      "epoch": 1.317065287653523,
      "grad_norm": 1.221191644668579,
      "learning_rate": 2.0555555555555555e-06,
      "loss": 0.0955,
      "step": 4075
    },
    {
      "epoch": 1.3251454427925016,
      "grad_norm": 1.3147417306900024,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.1003,
      "step": 4100
    },
    {
      "epoch": 1.3332255979314802,
      "grad_norm": 1.5507885217666626,
      "learning_rate": 1.944444444444445e-06,
      "loss": 0.1098,
      "step": 4125
    },
    {
      "epoch": 1.341305753070459,
      "grad_norm": 1.1320619583129883,
      "learning_rate": 1.888888888888889e-06,
      "loss": 0.0902,
      "step": 4150
    },
    {
      "epoch": 1.3493859082094377,
      "grad_norm": 1.003406047821045,
      "learning_rate": 1.8333333333333333e-06,
      "loss": 0.0887,
      "step": 4175
    },
    {
      "epoch": 1.3574660633484164,
      "grad_norm": 1.2398234605789185,
      "learning_rate": 1.777777777777778e-06,
      "loss": 0.1044,
      "step": 4200
    },
    {
      "epoch": 1.365546218487395,
      "grad_norm": 1.3185756206512451,
      "learning_rate": 1.7222222222222224e-06,
      "loss": 0.0931,
      "step": 4225
    },
    {
      "epoch": 1.3736263736263736,
      "grad_norm": 1.8669284582138062,
      "learning_rate": 1.6666666666666667e-06,
      "loss": 0.0998,
      "step": 4250
    },
    {
      "epoch": 1.3817065287653523,
      "grad_norm": 1.4902080297470093,
      "learning_rate": 1.6111111111111113e-06,
      "loss": 0.0944,
      "step": 4275
    },
    {
      "epoch": 1.389786683904331,
      "grad_norm": 1.2797434329986572,
      "learning_rate": 1.5555555555555558e-06,
      "loss": 0.1002,
      "step": 4300
    },
    {
      "epoch": 1.3978668390433096,
      "grad_norm": 1.3823866844177246,
      "learning_rate": 1.5e-06,
      "loss": 0.0848,
      "step": 4325
    },
    {
      "epoch": 1.4059469941822882,
      "grad_norm": 1.5034900903701782,
      "learning_rate": 1.4444444444444445e-06,
      "loss": 0.089,
      "step": 4350
    },
    {
      "epoch": 1.4140271493212668,
      "grad_norm": 1.0645461082458496,
      "learning_rate": 1.3888888888888892e-06,
      "loss": 0.0814,
      "step": 4375
    },
    {
      "epoch": 1.4221073044602457,
      "grad_norm": 1.3759708404541016,
      "learning_rate": 1.3333333333333334e-06,
      "loss": 0.084,
      "step": 4400
    },
    {
      "epoch": 1.4301874595992243,
      "grad_norm": 1.6364449262619019,
      "learning_rate": 1.2777777777777779e-06,
      "loss": 0.1011,
      "step": 4425
    },
    {
      "epoch": 1.438267614738203,
      "grad_norm": 1.8542567491531372,
      "learning_rate": 1.2222222222222223e-06,
      "loss": 0.1041,
      "step": 4450
    },
    {
      "epoch": 1.4463477698771816,
      "grad_norm": 1.2790472507476807,
      "learning_rate": 1.1666666666666668e-06,
      "loss": 0.0768,
      "step": 4475
    },
    {
      "epoch": 1.4544279250161603,
      "grad_norm": 1.3523430824279785,
      "learning_rate": 1.111111111111111e-06,
      "loss": 0.0862,
      "step": 4500
    },
    {
      "epoch": 1.4625080801551391,
      "grad_norm": 1.2620325088500977,
      "learning_rate": 1.0555555555555557e-06,
      "loss": 0.0931,
      "step": 4525
    },
    {
      "epoch": 1.4705882352941178,
      "grad_norm": 1.5867211818695068,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.094,
      "step": 4550
    },
    {
      "epoch": 1.4786683904330964,
      "grad_norm": 1.3185136318206787,
      "learning_rate": 9.444444444444445e-07,
      "loss": 0.0987,
      "step": 4575
    },
    {
      "epoch": 1.486748545572075,
      "grad_norm": 1.5266519784927368,
      "learning_rate": 8.88888888888889e-07,
      "loss": 0.0936,
      "step": 4600
    },
    {
      "epoch": 1.4948287007110537,
      "grad_norm": 1.2671607732772827,
      "learning_rate": 8.333333333333333e-07,
      "loss": 0.0968,
      "step": 4625
    },
    {
      "epoch": 1.5029088558500323,
      "grad_norm": 0.9610714912414551,
      "learning_rate": 7.777777777777779e-07,
      "loss": 0.1071,
      "step": 4650
    },
    {
      "epoch": 1.510989010989011,
      "grad_norm": 1.585331678390503,
      "learning_rate": 7.222222222222222e-07,
      "loss": 0.0785,
      "step": 4675
    },
    {
      "epoch": 1.5190691661279896,
      "grad_norm": 1.5121099948883057,
      "learning_rate": 6.666666666666667e-07,
      "loss": 0.0802,
      "step": 4700
    },
    {
      "epoch": 1.5271493212669682,
      "grad_norm": 1.6761611700057983,
      "learning_rate": 6.111111111111112e-07,
      "loss": 0.0956,
      "step": 4725
    },
    {
      "epoch": 1.5352294764059469,
      "grad_norm": 1.0066338777542114,
      "learning_rate": 5.555555555555555e-07,
      "loss": 0.0875,
      "step": 4750
    },
    {
      "epoch": 1.5433096315449255,
      "grad_norm": 0.9917683601379395,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.0871,
      "step": 4775
    },
    {
      "epoch": 1.5513897866839044,
      "grad_norm": 1.4825103282928467,
      "learning_rate": 4.444444444444445e-07,
      "loss": 0.1008,
      "step": 4800
    },
    {
      "epoch": 1.559469941822883,
      "grad_norm": 1.3838367462158203,
      "learning_rate": 3.8888888888888895e-07,
      "loss": 0.0903,
      "step": 4825
    },
    {
      "epoch": 1.5675500969618616,
      "grad_norm": 1.0742937326431274,
      "learning_rate": 3.3333333333333335e-07,
      "loss": 0.0918,
      "step": 4850
    },
    {
      "epoch": 1.5756302521008403,
      "grad_norm": 1.3103052377700806,
      "learning_rate": 2.7777777777777776e-07,
      "loss": 0.0964,
      "step": 4875
    },
    {
      "epoch": 1.5837104072398192,
      "grad_norm": 1.6678719520568848,
      "learning_rate": 2.2222222222222224e-07,
      "loss": 0.1042,
      "step": 4900
    },
    {
      "epoch": 1.5917905623787978,
      "grad_norm": 1.3988322019577026,
      "learning_rate": 1.6666666666666668e-07,
      "loss": 0.0925,
      "step": 4925
    },
    {
      "epoch": 1.5998707175177764,
      "grad_norm": 1.1961122751235962,
      "learning_rate": 1.1111111111111112e-07,
      "loss": 0.0931,
      "step": 4950
    },
    {
      "epoch": 1.607950872656755,
      "grad_norm": 1.1993706226348877,
      "learning_rate": 5.555555555555556e-08,
      "loss": 0.1012,
      "step": 4975
    },
    {
      "epoch": 1.6160310277957337,
      "grad_norm": 1.3308823108673096,
      "learning_rate": 0.0,
      "loss": 0.0874,
      "step": 5000
    },
    {
      "epoch": 1.6160310277957337,
      "eval_loss": 0.1364615559577942,
      "eval_runtime": 437.3566,
      "eval_samples_per_second": 2.286,
      "eval_steps_per_second": 0.144,
      "eval_wer": 0.10755268881722042,
      "step": 5000
    },
    {
      "epoch": 1.6160310277957337,
      "step": 5000,
      "total_flos": 5.435725490631475e+20,
      "train_loss": 0.17819489703178407,
      "train_runtime": 33619.5535,
      "train_samples_per_second": 4.759,
      "train_steps_per_second": 0.149
    }
  ],
  "logging_steps": 25,
  "max_steps": 5000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.435725490631475e+20,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}