{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 978,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003067484662576687,
      "grad_norm": 177.2131714128933,
      "learning_rate": 2.0408163265306121e-07,
      "loss": 3.0795,
      "step": 1
    },
    {
      "epoch": 0.015337423312883436,
      "grad_norm": 241.89792017630023,
      "learning_rate": 1.0204081632653063e-06,
      "loss": 2.738,
      "step": 5
    },
    {
      "epoch": 0.03067484662576687,
      "grad_norm": 97.9005821739992,
      "learning_rate": 2.0408163265306125e-06,
      "loss": 2.573,
      "step": 10
    },
    {
      "epoch": 0.046012269938650305,
      "grad_norm": 29.935305969711827,
      "learning_rate": 3.0612244897959185e-06,
      "loss": 2.4113,
      "step": 15
    },
    {
      "epoch": 0.06134969325153374,
      "grad_norm": 6.7865011448005985,
      "learning_rate": 4.081632653061225e-06,
      "loss": 1.9856,
      "step": 20
    },
    {
      "epoch": 0.07668711656441718,
      "grad_norm": 8.264677565869711,
      "learning_rate": 5.1020408163265315e-06,
      "loss": 1.8726,
      "step": 25
    },
    {
      "epoch": 0.09202453987730061,
      "grad_norm": 19.32485419745683,
      "learning_rate": 6.122448979591837e-06,
      "loss": 1.7009,
      "step": 30
    },
    {
      "epoch": 0.10736196319018405,
      "grad_norm": 10.162135754390622,
      "learning_rate": 7.1428571428571436e-06,
      "loss": 1.6191,
      "step": 35
    },
    {
      "epoch": 0.12269938650306748,
      "grad_norm": 3.0916095898500555,
      "learning_rate": 8.16326530612245e-06,
      "loss": 1.5164,
      "step": 40
    },
    {
      "epoch": 0.13803680981595093,
      "grad_norm": 43.413150705371685,
      "learning_rate": 9.183673469387756e-06,
      "loss": 1.5097,
      "step": 45
    },
    {
      "epoch": 0.15337423312883436,
      "grad_norm": 49.53965734626817,
      "learning_rate": 1.0204081632653063e-05,
      "loss": 1.5785,
      "step": 50
    },
    {
      "epoch": 0.1687116564417178,
      "grad_norm": 10.98873172852826,
      "learning_rate": 1.1224489795918367e-05,
      "loss": 1.4715,
      "step": 55
    },
    {
      "epoch": 0.18404907975460122,
      "grad_norm": 2.7742625393808584,
      "learning_rate": 1.2244897959183674e-05,
      "loss": 1.4263,
      "step": 60
    },
    {
      "epoch": 0.19938650306748465,
      "grad_norm": 23.101404729255652,
      "learning_rate": 1.326530612244898e-05,
      "loss": 1.4043,
      "step": 65
    },
    {
      "epoch": 0.2147239263803681,
      "grad_norm": 12.92305440472016,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 1.3793,
      "step": 70
    },
    {
      "epoch": 0.23006134969325154,
      "grad_norm": 2.882766144097806,
      "learning_rate": 1.530612244897959e-05,
      "loss": 1.4183,
      "step": 75
    },
    {
      "epoch": 0.24539877300613497,
      "grad_norm": 11.111007796269925,
      "learning_rate": 1.63265306122449e-05,
      "loss": 1.4517,
      "step": 80
    },
    {
      "epoch": 0.2607361963190184,
      "grad_norm": 2.897394026367856,
      "learning_rate": 1.7346938775510206e-05,
      "loss": 1.3393,
      "step": 85
    },
    {
      "epoch": 0.27607361963190186,
      "grad_norm": 2.9177477230316566,
      "learning_rate": 1.836734693877551e-05,
      "loss": 1.428,
      "step": 90
    },
    {
      "epoch": 0.29141104294478526,
      "grad_norm": 2.912560429885746,
      "learning_rate": 1.9387755102040817e-05,
      "loss": 1.304,
      "step": 95
    },
    {
      "epoch": 0.3067484662576687,
      "grad_norm": 2.5615070520092704,
      "learning_rate": 1.9999745104274995e-05,
      "loss": 1.2997,
      "step": 100
    },
    {
      "epoch": 0.3220858895705521,
      "grad_norm": 2.9480036513492336,
      "learning_rate": 1.9996877676598733e-05,
      "loss": 1.4086,
      "step": 105
    },
    {
      "epoch": 0.3374233128834356,
      "grad_norm": 2.5407276803281333,
      "learning_rate": 1.9990825118233958e-05,
      "loss": 1.3028,
      "step": 110
    },
    {
      "epoch": 0.35276073619631904,
      "grad_norm": 3.239233594777726,
      "learning_rate": 1.9981589357601727e-05,
      "loss": 1.4185,
      "step": 115
    },
    {
      "epoch": 0.36809815950920244,
      "grad_norm": 2.771896819307909,
      "learning_rate": 1.9969173337331283e-05,
      "loss": 1.3523,
      "step": 120
    },
    {
      "epoch": 0.3834355828220859,
      "grad_norm": 2.7643269799604724,
      "learning_rate": 1.9953581013322503e-05,
      "loss": 1.3175,
      "step": 125
    },
    {
      "epoch": 0.3987730061349693,
      "grad_norm": 3.508281226717504,
      "learning_rate": 1.99348173534855e-05,
      "loss": 1.2981,
      "step": 130
    },
    {
      "epoch": 0.41411042944785276,
      "grad_norm": 2.4302828555210025,
      "learning_rate": 1.9912888336157793e-05,
      "loss": 1.3109,
      "step": 135
    },
    {
      "epoch": 0.4294478527607362,
      "grad_norm": 2.3273613141747975,
      "learning_rate": 1.9887800948199496e-05,
      "loss": 1.2887,
      "step": 140
    },
    {
      "epoch": 0.4447852760736196,
      "grad_norm": 2.502367541877984,
      "learning_rate": 1.9859563182767268e-05,
      "loss": 1.2735,
      "step": 145
    },
    {
      "epoch": 0.4601226993865031,
      "grad_norm": 2.8584207562233113,
      "learning_rate": 1.9828184036767556e-05,
      "loss": 1.2271,
      "step": 150
    },
    {
      "epoch": 0.4754601226993865,
      "grad_norm": 2.190347531165954,
      "learning_rate": 1.9793673507990086e-05,
      "loss": 1.2896,
      "step": 155
    },
    {
      "epoch": 0.49079754601226994,
      "grad_norm": 2.2761435625476545,
      "learning_rate": 1.9756042591922436e-05,
      "loss": 1.3294,
      "step": 160
    },
    {
      "epoch": 0.5061349693251533,
      "grad_norm": 2.220138706256322,
      "learning_rate": 1.9715303278246724e-05,
      "loss": 1.3188,
      "step": 165
    },
    {
      "epoch": 0.5214723926380368,
      "grad_norm": 2.294310010674355,
      "learning_rate": 1.9671468547019575e-05,
      "loss": 1.2199,
      "step": 170
    },
    {
      "epoch": 0.5368098159509203,
      "grad_norm": 2.513372192362633,
      "learning_rate": 1.9624552364536472e-05,
      "loss": 1.3106,
      "step": 175
    },
    {
      "epoch": 0.5521472392638037,
      "grad_norm": 2.510468468716461,
      "learning_rate": 1.9574569678881965e-05,
      "loss": 1.3708,
      "step": 180
    },
    {
      "epoch": 0.5674846625766872,
      "grad_norm": 2.041921641107936,
      "learning_rate": 1.952153641516698e-05,
      "loss": 1.1784,
      "step": 185
    },
    {
      "epoch": 0.5828220858895705,
      "grad_norm": 2.27820722339295,
      "learning_rate": 1.94654694704549e-05,
      "loss": 1.1902,
      "step": 190
    },
    {
      "epoch": 0.598159509202454,
      "grad_norm": 2.328841014102968,
      "learning_rate": 1.9406386708377956e-05,
      "loss": 1.316,
      "step": 195
    },
    {
      "epoch": 0.6134969325153374,
      "grad_norm": 2.2926147060214066,
      "learning_rate": 1.9344306953445632e-05,
      "loss": 1.3843,
      "step": 200
    },
    {
      "epoch": 0.6288343558282209,
      "grad_norm": 3.113673918623077,
      "learning_rate": 1.9279249985046948e-05,
      "loss": 1.3199,
      "step": 205
    },
    {
      "epoch": 0.6441717791411042,
      "grad_norm": 2.105413236829464,
      "learning_rate": 1.92112365311485e-05,
      "loss": 1.2593,
      "step": 210
    },
    {
      "epoch": 0.6595092024539877,
      "grad_norm": 2.2971282506758204,
      "learning_rate": 1.9140288261690278e-05,
      "loss": 1.2316,
      "step": 215
    },
    {
      "epoch": 0.6748466257668712,
      "grad_norm": 3.060568400530995,
      "learning_rate": 1.9066427781681314e-05,
      "loss": 1.1986,
      "step": 220
    },
    {
      "epoch": 0.6901840490797546,
      "grad_norm": 2.0851577530985663,
      "learning_rate": 1.8989678623997506e-05,
      "loss": 1.2465,
      "step": 225
    },
    {
      "epoch": 0.7055214723926381,
      "grad_norm": 2.34599375716676,
      "learning_rate": 1.891006524188368e-05,
      "loss": 1.3111,
      "step": 230
    },
    {
      "epoch": 0.7208588957055214,
      "grad_norm": 2.0045103451147352,
      "learning_rate": 1.8827613001162534e-05,
      "loss": 1.2048,
      "step": 235
    },
    {
      "epoch": 0.7361963190184049,
      "grad_norm": 2.0865280550933663,
      "learning_rate": 1.8742348172152728e-05,
      "loss": 1.3875,
      "step": 240
    },
    {
      "epoch": 0.7515337423312883,
      "grad_norm": 2.1899054440028602,
      "learning_rate": 1.8654297921298862e-05,
      "loss": 1.2177,
      "step": 245
    },
    {
      "epoch": 0.7668711656441718,
      "grad_norm": 2.146615168451316,
      "learning_rate": 1.856349030251589e-05,
      "loss": 1.2302,
      "step": 250
    },
    {
      "epoch": 0.7822085889570553,
      "grad_norm": 5.215245761172734,
      "learning_rate": 1.846995424825079e-05,
      "loss": 1.1558,
      "step": 255
    },
    {
      "epoch": 0.7975460122699386,
      "grad_norm": 2.1303631206592253,
      "learning_rate": 1.837371956026433e-05,
      "loss": 1.2666,
      "step": 260
    },
    {
      "epoch": 0.8128834355828221,
      "grad_norm": 3.324267620281515,
      "learning_rate": 1.8274816900135842e-05,
      "loss": 1.2796,
      "step": 265
    },
    {
      "epoch": 0.8282208588957055,
      "grad_norm": 2.0369143733640174,
      "learning_rate": 1.817327777949407e-05,
      "loss": 1.2191,
      "step": 270
    },
    {
      "epoch": 0.843558282208589,
      "grad_norm": 2.268965846881571,
      "learning_rate": 1.806913454997717e-05,
      "loss": 1.2382,
      "step": 275
    },
    {
      "epoch": 0.8588957055214724,
      "grad_norm": 1.9534994546311044,
      "learning_rate": 1.7962420392925066e-05,
      "loss": 1.3029,
      "step": 280
    },
    {
      "epoch": 0.8742331288343558,
      "grad_norm": 1.9407859425510332,
      "learning_rate": 1.785316930880745e-05,
      "loss": 1.183,
      "step": 285
    },
    {
      "epoch": 0.8895705521472392,
      "grad_norm": 1.9539166093233689,
      "learning_rate": 1.7741416106390828e-05,
      "loss": 1.1725,
      "step": 290
    },
    {
      "epoch": 0.9049079754601227,
      "grad_norm": 2.2999481127748216,
      "learning_rate": 1.7627196391647982e-05,
      "loss": 1.2114,
      "step": 295
    },
    {
      "epoch": 0.9202453987730062,
      "grad_norm": 1.9126415566238704,
      "learning_rate": 1.75105465564135e-05,
      "loss": 1.2206,
      "step": 300
    },
    {
      "epoch": 0.9355828220858896,
      "grad_norm": 2.3510054432341887,
      "learning_rate": 1.739150376678883e-05,
      "loss": 1.301,
      "step": 305
    },
    {
      "epoch": 0.950920245398773,
      "grad_norm": 2.5269459534002596,
      "learning_rate": 1.727010595130074e-05,
      "loss": 1.3914,
      "step": 310
    },
    {
      "epoch": 0.9662576687116564,
      "grad_norm": 2.045224623846914,
      "learning_rate": 1.714639178881678e-05,
      "loss": 1.2722,
      "step": 315
    },
    {
      "epoch": 0.9815950920245399,
      "grad_norm": 2.0917960158205107,
      "learning_rate": 1.7020400696221737e-05,
      "loss": 1.2093,
      "step": 320
    },
    {
      "epoch": 0.9969325153374233,
      "grad_norm": 2.017009094857716,
      "learning_rate": 1.6892172815858896e-05,
      "loss": 1.2658,
      "step": 325
    },
    {
      "epoch": 1.0,
      "eval_loss": 1.2135429382324219,
      "eval_runtime": 6.9064,
      "eval_samples_per_second": 23.022,
      "eval_steps_per_second": 5.792,
      "step": 326
    },
    {
      "epoch": 1.0122699386503067,
      "grad_norm": 2.1076821373912433,
      "learning_rate": 1.6761749002740195e-05,
      "loss": 1.126,
      "step": 330
    },
    {
      "epoch": 1.0276073619631902,
      "grad_norm": 2.532948886360268,
      "learning_rate": 1.662917081152932e-05,
      "loss": 1.0517,
      "step": 335
    },
    {
      "epoch": 1.0429447852760736,
      "grad_norm": 2.159362066124978,
      "learning_rate": 1.6494480483301836e-05,
      "loss": 0.9849,
      "step": 340
    },
    {
      "epoch": 1.058282208588957,
      "grad_norm": 1.9634133830158156,
      "learning_rate": 1.635772093208669e-05,
      "loss": 0.961,
      "step": 345
    },
    {
      "epoch": 1.0736196319018405,
      "grad_norm": 2.643597563770428,
      "learning_rate": 1.6218935731193223e-05,
      "loss": 0.929,
      "step": 350
    },
    {
      "epoch": 1.0889570552147239,
      "grad_norm": 2.2336296848587986,
      "learning_rate": 1.6078169099328196e-05,
      "loss": 0.9695,
      "step": 355
    },
    {
      "epoch": 1.1042944785276074,
      "grad_norm": 2.5120125840182803,
      "learning_rate": 1.5935465886507143e-05,
      "loss": 1.1179,
      "step": 360
    },
    {
      "epoch": 1.1196319018404908,
      "grad_norm": 2.0896191655801735,
      "learning_rate": 1.579087155976459e-05,
      "loss": 1.1294,
      "step": 365
    },
    {
      "epoch": 1.1349693251533743,
      "grad_norm": 2.325319374896529,
      "learning_rate": 1.5644432188667695e-05,
      "loss": 0.9826,
      "step": 370
    },
    {
      "epoch": 1.1503067484662577,
      "grad_norm": 2.1728488841214095,
      "learning_rate": 1.5496194430637903e-05,
      "loss": 0.9251,
      "step": 375
    },
    {
      "epoch": 1.165644171779141,
      "grad_norm": 2.424880159324192,
      "learning_rate": 1.5346205516085305e-05,
      "loss": 1.0463,
      "step": 380
    },
    {
      "epoch": 1.1809815950920246,
      "grad_norm": 3.026265515790728,
      "learning_rate": 1.5194513233360439e-05,
      "loss": 0.9217,
      "step": 385
    },
    {
      "epoch": 1.196319018404908,
      "grad_norm": 3.1617293471411654,
      "learning_rate": 1.504116591352832e-05,
      "loss": 1.1437,
      "step": 390
    },
    {
      "epoch": 1.2116564417177913,
      "grad_norm": 2.2307237557720003,
      "learning_rate": 1.4886212414969551e-05,
      "loss": 0.967,
      "step": 395
    },
    {
      "epoch": 1.2269938650306749,
      "grad_norm": 2.430911245376782,
      "learning_rate": 1.4729702107813438e-05,
      "loss": 0.999,
      "step": 400
    },
    {
      "epoch": 1.2423312883435582,
      "grad_norm": 2.1612231748678896,
      "learning_rate": 1.4571684858208045e-05,
      "loss": 1.004,
      "step": 405
    },
    {
      "epoch": 1.2576687116564418,
      "grad_norm": 2.6596296516845563,
      "learning_rate": 1.4412211012432213e-05,
      "loss": 1.0454,
      "step": 410
    },
    {
      "epoch": 1.2730061349693251,
      "grad_norm": 2.093932365865954,
      "learning_rate": 1.4251331380854602e-05,
      "loss": 1.0569,
      "step": 415
    },
    {
      "epoch": 1.2883435582822087,
      "grad_norm": 2.72153245614344,
      "learning_rate": 1.408909722174487e-05,
      "loss": 0.9593,
      "step": 420
    },
    {
      "epoch": 1.303680981595092,
      "grad_norm": 2.6462362091450515,
      "learning_rate": 1.3925560224942145e-05,
      "loss": 0.9271,
      "step": 425
    },
    {
      "epoch": 1.3190184049079754,
      "grad_norm": 2.267205865998314,
      "learning_rate": 1.3760772495385998e-05,
      "loss": 0.9582,
      "step": 430
    },
    {
      "epoch": 1.334355828220859,
      "grad_norm": 14.299415996684987,
      "learning_rate": 1.3594786536515154e-05,
      "loss": 0.9659,
      "step": 435
    },
    {
      "epoch": 1.3496932515337423,
      "grad_norm": 2.8919289960885157,
      "learning_rate": 1.3427655233539227e-05,
      "loss": 0.922,
      "step": 440
    },
    {
      "epoch": 1.3650306748466257,
      "grad_norm": 2.0424273794301344,
      "learning_rate": 1.3259431836588843e-05,
      "loss": 0.9932,
      "step": 445
    },
    {
      "epoch": 1.3803680981595092,
      "grad_norm": 6.610636759407049,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.9837,
      "step": 450
    },
    {
      "epoch": 1.3957055214723926,
      "grad_norm": 2.2497868172019357,
      "learning_rate": 1.2919923483984415e-05,
      "loss": 0.9149,
      "step": 455
    },
    {
      "epoch": 1.4110429447852761,
      "grad_norm": 2.2842331908069125,
      "learning_rate": 1.2748746699952338e-05,
      "loss": 1.0342,
      "step": 460
    },
    {
      "epoch": 1.4263803680981595,
      "grad_norm": 2.1102577421193085,
      "learning_rate": 1.2576694130724905e-05,
      "loss": 1.1568,
      "step": 465
    },
    {
      "epoch": 1.441717791411043,
      "grad_norm": 2.127882920435348,
      "learning_rate": 1.2403820594409926e-05,
      "loss": 0.9638,
      "step": 470
    },
    {
      "epoch": 1.4570552147239264,
      "grad_norm": 2.584359793762212,
      "learning_rate": 1.2230181170685636e-05,
      "loss": 0.9673,
      "step": 475
    },
    {
      "epoch": 1.4723926380368098,
      "grad_norm": 2.264911154520169,
      "learning_rate": 1.2055831183251608e-05,
      "loss": 1.0154,
      "step": 480
    },
    {
      "epoch": 1.4877300613496933,
      "grad_norm": 2.0087330179243335,
      "learning_rate": 1.1880826182201926e-05,
      "loss": 0.8973,
      "step": 485
    },
    {
      "epoch": 1.5030674846625767,
      "grad_norm": 1.9321511470889676,
      "learning_rate": 1.170522192632624e-05,
      "loss": 0.9371,
      "step": 490
    },
    {
      "epoch": 1.51840490797546,
      "grad_norm": 2.121122226050571,
      "learning_rate": 1.1529074365344302e-05,
      "loss": 1.033,
      "step": 495
    },
    {
      "epoch": 1.5337423312883436,
      "grad_norm": 2.029011447995027,
      "learning_rate": 1.1352439622079689e-05,
      "loss": 0.9768,
      "step": 500
    },
    {
      "epoch": 1.5490797546012272,
      "grad_norm": 2.108744801443365,
      "learning_rate": 1.1175373974578378e-05,
      "loss": 0.9089,
      "step": 505
    },
    {
      "epoch": 1.5644171779141103,
      "grad_norm": 2.2204587915399876,
      "learning_rate": 1.0997933838177828e-05,
      "loss": 1.0449,
      "step": 510
    },
    {
      "epoch": 1.5797546012269938,
      "grad_norm": 2.1535325181232294,
      "learning_rate": 1.0820175747532373e-05,
      "loss": 1.0172,
      "step": 515
    },
    {
      "epoch": 1.5950920245398774,
      "grad_norm": 21.819229878115912,
      "learning_rate": 1.064215633860055e-05,
      "loss": 0.9769,
      "step": 520
    },
    {
      "epoch": 1.6104294478527608,
      "grad_norm": 2.1296751297142897,
      "learning_rate": 1.0463932330600197e-05,
      "loss": 0.9483,
      "step": 525
    },
    {
      "epoch": 1.6257668711656441,
      "grad_norm": 2.1616752600527134,
      "learning_rate": 1.0285560507936962e-05,
      "loss": 4.3216,
      "step": 530
    },
    {
      "epoch": 1.6411042944785277,
      "grad_norm": 3.2786842755698795,
      "learning_rate": 1.010709770211212e-05,
      "loss": 1.1255,
      "step": 535
    },
    {
      "epoch": 1.656441717791411,
      "grad_norm": 2.4763448399092147,
      "learning_rate": 9.928600773615306e-06,
      "loss": 1.0498,
      "step": 540
    },
    {
      "epoch": 1.6717791411042944,
      "grad_norm": 2.006782735936947,
      "learning_rate": 9.750126593808083e-06,
      "loss": 0.927,
      "step": 545
    },
    {
      "epoch": 1.687116564417178,
      "grad_norm": 2.2437759465461777,
      "learning_rate": 9.571732026803978e-06,
      "loss": 0.906,
      "step": 550
    },
    {
      "epoch": 1.7024539877300615,
      "grad_norm": 1.9556898555009257,
      "learning_rate": 9.393473911350895e-06,
      "loss": 0.9715,
      "step": 555
    },
    {
      "epoch": 1.7177914110429446,
      "grad_norm": 2.082737830638052,
      "learning_rate": 9.215409042721553e-06,
      "loss": 1.0,
      "step": 560
    },
    {
      "epoch": 1.7331288343558282,
      "grad_norm": 2.3756819460619534,
      "learning_rate": 9.037594154617811e-06,
      "loss": 0.9755,
      "step": 565
    },
    {
      "epoch": 1.7484662576687118,
      "grad_norm": 2.7832428710692265,
      "learning_rate": 8.860085901094595e-06,
      "loss": 0.9694,
      "step": 570
    },
    {
      "epoch": 1.7638036809815951,
      "grad_norm": 2.299830262573217,
      "learning_rate": 8.682940838509206e-06,
      "loss": 0.8829,
      "step": 575
    },
    {
      "epoch": 1.7791411042944785,
      "grad_norm": 1.9164299833335463,
      "learning_rate": 8.50621540750175e-06,
      "loss": 0.9634,
      "step": 580
    },
    {
      "epoch": 1.794478527607362,
      "grad_norm": 2.0803428687947023,
      "learning_rate": 8.329965915012451e-06,
      "loss": 1.0047,
      "step": 585
    },
    {
      "epoch": 1.8098159509202454,
      "grad_norm": 1.961542308344144,
      "learning_rate": 8.154248516341547e-06,
      "loss": 0.9783,
      "step": 590
    },
    {
      "epoch": 1.8251533742331287,
      "grad_norm": 1.98951654932812,
      "learning_rate": 7.979119197257505e-06,
      "loss": 1.0009,
      "step": 595
    },
    {
      "epoch": 1.8404907975460123,
      "grad_norm": 2.1181463512012653,
      "learning_rate": 7.804633756159258e-06,
      "loss": 0.9263,
      "step": 600
    },
    {
      "epoch": 1.8558282208588959,
      "grad_norm": 2.017971907316727,
      "learning_rate": 7.63084778629813e-06,
      "loss": 0.9933,
      "step": 605
    },
    {
      "epoch": 1.871165644171779,
      "grad_norm": 2.1164163856970135,
      "learning_rate": 7.4578166580651335e-06,
      "loss": 0.9392,
      "step": 610
    },
    {
      "epoch": 1.8865030674846626,
      "grad_norm": 2.118557747327772,
      "learning_rate": 7.285595501349259e-06,
      "loss": 0.9537,
      "step": 615
    },
    {
      "epoch": 1.9018404907975461,
      "grad_norm": 2.087163676278093,
      "learning_rate": 7.114239187972416e-06,
      "loss": 0.8722,
      "step": 620
    },
    {
      "epoch": 1.9171779141104295,
      "grad_norm": 2.222443842922782,
      "learning_rate": 6.94380231420656e-06,
      "loss": 0.9518,
      "step": 625
    },
    {
      "epoch": 1.9325153374233128,
      "grad_norm": 2.0519691059771503,
      "learning_rate": 6.774339183378663e-06,
      "loss": 0.9595,
      "step": 630
    },
    {
      "epoch": 1.9478527607361964,
      "grad_norm": 2.395088570054642,
      "learning_rate": 6.605903788568962e-06,
      "loss": 0.9082,
      "step": 635
    },
    {
      "epoch": 1.9631901840490797,
      "grad_norm": 4.89604575465679,
      "learning_rate": 6.438549795408107e-06,
      "loss": 0.9101,
      "step": 640
    },
    {
      "epoch": 1.978527607361963,
      "grad_norm": 2.1171931357011236,
      "learning_rate": 6.272330524978613e-06,
      "loss": 0.9687,
      "step": 645
    },
    {
      "epoch": 1.9938650306748467,
      "grad_norm": 2.0986919385384137,
      "learning_rate": 6.107298936826086e-06,
      "loss": 0.8841,
      "step": 650
    },
    {
      "epoch": 2.0,
      "eval_loss": 1.2134720087051392,
      "eval_runtime": 7.1411,
      "eval_samples_per_second": 22.266,
      "eval_steps_per_second": 5.601,
      "step": 652
    },
    {
      "epoch": 2.0092024539877302,
      "grad_norm": 2.1628044565671556,
      "learning_rate": 5.943507612085661e-06,
      "loss": 0.8251,
      "step": 655
    },
    {
      "epoch": 2.0245398773006134,
      "grad_norm": 1.980162767088004,
      "learning_rate": 5.781008736728975e-06,
      "loss": 0.6487,
      "step": 660
    },
    {
      "epoch": 2.039877300613497,
      "grad_norm": 2.862228737237209,
      "learning_rate": 5.619854084937085e-06,
      "loss": 0.6706,
      "step": 665
    },
    {
      "epoch": 2.0552147239263805,
      "grad_norm": 2.2439249118077136,
      "learning_rate": 5.460095002604533e-06,
      "loss": 0.6652,
      "step": 670
    },
    {
      "epoch": 2.0705521472392636,
      "grad_norm": 2.254257809075137,
      "learning_rate": 5.3017823909799295e-06,
      "loss": 0.7293,
      "step": 675
    },
    {
      "epoch": 2.085889570552147,
      "grad_norm": 2.223082549239095,
      "learning_rate": 5.144966690448159e-06,
      "loss": 0.755,
      "step": 680
    },
    {
      "epoch": 2.1012269938650308,
      "grad_norm": 2.0645657168776617,
      "learning_rate": 4.9896978644594516e-06,
      "loss": 0.5896,
      "step": 685
    },
    {
      "epoch": 2.116564417177914,
      "grad_norm": 2.543974903041173,
      "learning_rate": 4.836025383610382e-06,
      "loss": 0.7457,
      "step": 690
    },
    {
      "epoch": 2.1319018404907975,
      "grad_norm": 2.1096988101915524,
      "learning_rate": 4.683998209881943e-06,
      "loss": 0.6847,
      "step": 695
    },
    {
      "epoch": 2.147239263803681,
      "grad_norm": 2.36099194700885,
      "learning_rate": 4.533664781039622e-06,
      "loss": 0.715,
      "step": 700
    },
    {
      "epoch": 2.1625766871165646,
      "grad_norm": 2.633833128566785,
      "learning_rate": 4.385072995200532e-06,
      "loss": 0.626,
      "step": 705
    },
    {
      "epoch": 2.1779141104294477,
      "grad_norm": 2.506530086487105,
      "learning_rate": 4.2382701955724724e-06,
      "loss": 0.7172,
      "step": 710
    },
    {
      "epoch": 2.1932515337423313,
      "grad_norm": 2.3802287660865553,
      "learning_rate": 4.093303155369771e-06,
      "loss": 0.6839,
      "step": 715
    },
    {
      "epoch": 2.208588957055215,
      "grad_norm": 2.24220538972355,
      "learning_rate": 3.950218062910776e-06,
      "loss": 0.5827,
      "step": 720
    },
    {
      "epoch": 2.223926380368098,
      "grad_norm": 2.2370918425887267,
      "learning_rate": 3.8090605069016596e-06,
      "loss": 0.6599,
      "step": 725
    },
    {
      "epoch": 2.2392638036809815,
      "grad_norm": 2.1088652822907448,
      "learning_rate": 3.6698754619112974e-06,
      "loss": 0.7594,
      "step": 730
    },
    {
      "epoch": 2.254601226993865,
      "grad_norm": 2.083592158309122,
      "learning_rate": 3.53270727404179e-06,
      "loss": 0.6702,
      "step": 735
    },
    {
      "epoch": 2.2699386503067487,
      "grad_norm": 2.567792989862267,
      "learning_rate": 3.3975996467992557e-06,
      "loss": 0.6419,
      "step": 740
    },
    {
      "epoch": 2.285276073619632,
      "grad_norm": 2.334913734729685,
      "learning_rate": 3.2645956271693257e-06,
      "loss": 0.6634,
      "step": 745
    },
    {
      "epoch": 2.3006134969325154,
      "grad_norm": 2.3429523817504716,
      "learning_rate": 3.133737591901864e-06,
      "loss": 0.6939,
      "step": 750
    },
    {
      "epoch": 2.315950920245399,
      "grad_norm": 2.2619584916199615,
      "learning_rate": 3.0050672340091723e-06,
      "loss": 0.6678,
      "step": 755
    },
    {
      "epoch": 2.331288343558282,
      "grad_norm": 2.6296479257995777,
      "learning_rate": 2.878625549482084e-06,
      "loss": 0.7031,
      "step": 760
    },
    {
      "epoch": 2.3466257668711656,
      "grad_norm": 2.8130762384906864,
      "learning_rate": 2.7544528242281323e-06,
      "loss": 0.5788,
      "step": 765
    },
    {
      "epoch": 2.361963190184049,
      "grad_norm": 2.150686530040835,
      "learning_rate": 2.6325886212359496e-06,
      "loss": 0.6077,
      "step": 770
    },
    {
      "epoch": 2.3773006134969323,
      "grad_norm": 2.2806678502354476,
      "learning_rate": 2.51307176797001e-06,
      "loss": 0.7225,
      "step": 775
    },
    {
      "epoch": 2.392638036809816,
      "grad_norm": 2.257367372279919,
      "learning_rate": 2.395940343999691e-06,
      "loss": 0.66,
      "step": 780
    },
    {
      "epoch": 2.4079754601226995,
      "grad_norm": 2.105998185034246,
      "learning_rate": 2.2812316688666735e-06,
      "loss": 0.5642,
      "step": 785
    },
    {
      "epoch": 2.4233128834355826,
      "grad_norm": 2.6751693267999164,
      "learning_rate": 2.1689822901944456e-06,
      "loss": 0.6999,
      "step": 790
    },
    {
      "epoch": 2.438650306748466,
      "grad_norm": 2.337986334515246,
      "learning_rate": 2.0592279720437856e-06,
      "loss": 0.6405,
      "step": 795
    },
    {
      "epoch": 2.4539877300613497,
      "grad_norm": 2.5090481285811026,
      "learning_rate": 1.9520036835178667e-06,
      "loss": 0.7335,
      "step": 800
    },
    {
      "epoch": 2.4693251533742333,
      "grad_norm": 2.222070003754594,
      "learning_rate": 1.8473435876206792e-06,
      "loss": 0.6446,
      "step": 805
    },
    {
      "epoch": 2.4846625766871164,
      "grad_norm": 2.3534318368821467,
      "learning_rate": 1.74528103037226e-06,
      "loss": 0.7604,
      "step": 810
    },
    {
      "epoch": 2.5,
      "grad_norm": 2.1957922076650225,
      "learning_rate": 1.645848530184233e-06,
      "loss": 0.6602,
      "step": 815
    },
    {
      "epoch": 2.5153374233128836,
      "grad_norm": 2.248469614521019,
      "learning_rate": 1.5490777674990376e-06,
      "loss": 0.7518,
      "step": 820
    },
    {
      "epoch": 2.530674846625767,
      "grad_norm": 2.358970858496667,
      "learning_rate": 1.4549995746961332e-06,
      "loss": 0.6974,
      "step": 825
    },
    {
      "epoch": 2.5460122699386503,
      "grad_norm": 2.409137169378918,
      "learning_rate": 1.3636439262684299e-06,
      "loss": 0.6054,
      "step": 830
    },
    {
      "epoch": 2.561349693251534,
      "grad_norm": 2.3008344145258275,
      "learning_rate": 1.2750399292720284e-06,
      "loss": 0.6626,
      "step": 835
    },
    {
      "epoch": 2.5766871165644174,
      "grad_norm": 2.2498393839801403,
      "learning_rate": 1.1892158140523546e-06,
      "loss": 0.6923,
      "step": 840
    },
    {
      "epoch": 2.5920245398773005,
      "grad_norm": 2.287309751844316,
      "learning_rate": 1.1061989252496053e-06,
      "loss": 0.6596,
      "step": 845
    },
    {
      "epoch": 2.607361963190184,
      "grad_norm": 3.2190551280360937,
      "learning_rate": 1.0260157130864178e-06,
      "loss": 0.6771,
      "step": 850
    },
    {
      "epoch": 2.6226993865030677,
      "grad_norm": 2.1948501519870782,
      "learning_rate": 9.486917249404815e-07,
      "loss": 0.6886,
      "step": 855
    },
    {
      "epoch": 2.638036809815951,
      "grad_norm": 2.1803684507178955,
      "learning_rate": 8.742515972048404e-07,
      "loss": 0.7121,
      "step": 860
    },
    {
      "epoch": 2.6533742331288344,
      "grad_norm": 2.089857540368277,
      "learning_rate": 8.027190474384127e-07,
      "loss": 0.5654,
      "step": 865
    },
    {
      "epoch": 2.668711656441718,
      "grad_norm": 2.900973015953986,
      "learning_rate": 7.341168668092857e-07,
      "loss": 0.5899,
      "step": 870
    },
    {
      "epoch": 2.684049079754601,
      "grad_norm": 2.269220220981122,
      "learning_rate": 6.684669128331655e-07,
      "loss": 0.7626,
      "step": 875
    },
    {
      "epoch": 2.6993865030674846,
      "grad_norm": 2.177246970471886,
      "learning_rate": 6.057901024092949e-07,
      "loss": 0.6272,
      "step": 880
    },
    {
      "epoch": 2.714723926380368,
      "grad_norm": 2.265583420009415,
      "learning_rate": 5.461064051560705e-07,
      "loss": 0.7539,
      "step": 885
    },
    {
      "epoch": 2.7300613496932513,
      "grad_norm": 2.097337573364251,
      "learning_rate": 4.894348370484648e-07,
      "loss": 0.6404,
      "step": 890
    },
    {
      "epoch": 2.745398773006135,
      "grad_norm": 2.6099811023622483,
      "learning_rate": 4.3579345435930454e-07,
      "loss": 0.781,
      "step": 895
    },
    {
      "epoch": 2.7607361963190185,
      "grad_norm": 2.2217027523814155,
      "learning_rate": 3.851993479063154e-07,
      "loss": 0.7015,
      "step": 900
    },
    {
      "epoch": 2.7760736196319016,
      "grad_norm": 2.325907671442105,
      "learning_rate": 3.3766863760676947e-07,
      "loss": 0.6844,
      "step": 905
    },
    {
      "epoch": 2.791411042944785,
      "grad_norm": 2.264535197628352,
      "learning_rate": 2.93216467341475e-07,
      "loss": 0.6508,
      "step": 910
    },
    {
      "epoch": 2.8067484662576687,
      "grad_norm": 2.4713808636104533,
      "learning_rate": 2.5185700012975603e-07,
      "loss": 0.7762,
      "step": 915
    },
    {
      "epoch": 2.8220858895705523,
      "grad_norm": 2.402679896607673,
      "learning_rate": 2.1360341361692517e-07,
      "loss": 0.6639,
      "step": 920
    },
    {
      "epoch": 2.837423312883436,
      "grad_norm": 2.179790948612016,
      "learning_rate": 1.784678958757291e-07,
      "loss": 0.65,
      "step": 925
    },
    {
      "epoch": 2.852760736196319,
      "grad_norm": 2.076797574324857,
      "learning_rate": 1.464616415230702e-07,
      "loss": 0.6342,
      "step": 930
    },
    {
      "epoch": 2.8680981595092025,
      "grad_norm": 2.1047162156910804,
      "learning_rate": 1.1759484815326294e-07,
      "loss": 0.6047,
      "step": 935
    },
    {
      "epoch": 2.883435582822086,
      "grad_norm": 2.1660216273789237,
      "learning_rate": 9.187671308895418e-08,
      "loss": 0.6571,
      "step": 940
    },
    {
      "epoch": 2.8987730061349692,
      "grad_norm": 2.713364435220778,
      "learning_rate": 6.931543045073708e-08,
      "loss": 0.8338,
      "step": 945
    },
    {
      "epoch": 2.914110429447853,
      "grad_norm": 2.6713007123159724,
      "learning_rate": 4.991818854640396e-08,
      "loss": 0.6373,
      "step": 950
    },
    {
      "epoch": 2.9294478527607364,
      "grad_norm": 2.1069338794602723,
      "learning_rate": 3.369116758066171e-08,
      "loss": 0.6385,
      "step": 955
    },
    {
      "epoch": 2.9447852760736195,
      "grad_norm": 2.312520932069723,
      "learning_rate": 2.063953768603799e-08,
      "loss": 0.7596,
      "step": 960
    },
    {
      "epoch": 2.960122699386503,
      "grad_norm": 2.1046770032748077,
      "learning_rate": 1.0767457275615567e-08,
      "loss": 0.634,
      "step": 965
    },
    {
      "epoch": 2.9754601226993866,
      "grad_norm": 2.207645312141143,
      "learning_rate": 4.0780717181077015e-09,
      "loss": 0.7555,
      "step": 970
    },
    {
      "epoch": 2.9907975460122698,
      "grad_norm": 2.457929371194042,
      "learning_rate": 5.735123357042405e-10,
      "loss": 0.7012,
      "step": 975
    },
    {
      "epoch": 3.0,
      "eval_loss": 1.3543522357940674,
      "eval_runtime": 6.5158,
      "eval_samples_per_second": 24.402,
      "eval_steps_per_second": 6.139,
      "step": 978
    },
    {
      "epoch": 3.0,
      "step": 978,
      "total_flos": 3642602029056.0,
      "train_loss": 1.0360665048314506,
      "train_runtime": 586.176,
      "train_samples_per_second": 6.674,
      "train_steps_per_second": 1.668
    }
  ],
  "logging_steps": 5,
  "max_steps": 978,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 3642602029056.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|