|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.0, |
|
"eval_steps": 500, |
|
"global_step": 5779, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 5.7683433317951084e-05, |
|
"grad_norm": 0.805375337600708, |
|
"learning_rate": 1.1534025374855825e-07, |
|
"loss": 1.1819, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0002884171665897554, |
|
"grad_norm": 0.6597965359687805, |
|
"learning_rate": 5.767012687427913e-07, |
|
"loss": 1.0888, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0005768343331795108, |
|
"grad_norm": 0.8368421792984009, |
|
"learning_rate": 1.1534025374855826e-06, |
|
"loss": 1.2132, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0008652514997692663, |
|
"grad_norm": 0.9829678535461426, |
|
"learning_rate": 1.7301038062283738e-06, |
|
"loss": 1.1889, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0011536686663590216, |
|
"grad_norm": 0.7449169158935547, |
|
"learning_rate": 2.3068050749711653e-06, |
|
"loss": 1.2103, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.001442085832948777, |
|
"grad_norm": 0.7239571809768677, |
|
"learning_rate": 2.8835063437139563e-06, |
|
"loss": 1.1712, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0017305029995385325, |
|
"grad_norm": 0.6156097650527954, |
|
"learning_rate": 3.4602076124567477e-06, |
|
"loss": 1.0438, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0020189201661282878, |
|
"grad_norm": 0.7826522588729858, |
|
"learning_rate": 4.036908881199539e-06, |
|
"loss": 1.212, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.0023073373327180432, |
|
"grad_norm": 0.6530287861824036, |
|
"learning_rate": 4.6136101499423305e-06, |
|
"loss": 1.1553, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0025957544993077987, |
|
"grad_norm": 0.7508683204650879, |
|
"learning_rate": 5.190311418685121e-06, |
|
"loss": 1.1354, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.002884171665897554, |
|
"grad_norm": 0.6567290425300598, |
|
"learning_rate": 5.7670126874279126e-06, |
|
"loss": 1.1375, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.0031725888324873096, |
|
"grad_norm": 0.5964232683181763, |
|
"learning_rate": 6.3437139561707036e-06, |
|
"loss": 1.1193, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.003461005999077065, |
|
"grad_norm": 0.7864587306976318, |
|
"learning_rate": 6.920415224913495e-06, |
|
"loss": 1.1051, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0037494231656668205, |
|
"grad_norm": 0.643945574760437, |
|
"learning_rate": 7.497116493656286e-06, |
|
"loss": 1.107, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0040378403322565756, |
|
"grad_norm": 0.6281377077102661, |
|
"learning_rate": 8.073817762399077e-06, |
|
"loss": 1.0673, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0043262574988463314, |
|
"grad_norm": 0.6308133006095886, |
|
"learning_rate": 8.650519031141868e-06, |
|
"loss": 1.0958, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.0046146746654360865, |
|
"grad_norm": 0.566056489944458, |
|
"learning_rate": 9.227220299884661e-06, |
|
"loss": 1.0433, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.004903091832025842, |
|
"grad_norm": 0.6622409820556641, |
|
"learning_rate": 9.803921568627451e-06, |
|
"loss": 0.9888, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.005191508998615597, |
|
"grad_norm": 0.6094119548797607, |
|
"learning_rate": 1.0380622837370241e-05, |
|
"loss": 1.1145, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.005479926165205353, |
|
"grad_norm": 0.6860048770904541, |
|
"learning_rate": 1.0957324106113035e-05, |
|
"loss": 1.0594, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.005768343331795108, |
|
"grad_norm": 0.6527470946311951, |
|
"learning_rate": 1.1534025374855825e-05, |
|
"loss": 0.9886, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.006056760498384864, |
|
"grad_norm": 0.6624901294708252, |
|
"learning_rate": 1.2110726643598615e-05, |
|
"loss": 1.0142, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.006345177664974619, |
|
"grad_norm": 0.680112361907959, |
|
"learning_rate": 1.2687427912341407e-05, |
|
"loss": 0.9999, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.006633594831564375, |
|
"grad_norm": 0.6242687702178955, |
|
"learning_rate": 1.3264129181084197e-05, |
|
"loss": 0.9571, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.00692201199815413, |
|
"grad_norm": 0.6047985553741455, |
|
"learning_rate": 1.384083044982699e-05, |
|
"loss": 1.0342, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.007210429164743885, |
|
"grad_norm": 0.650623083114624, |
|
"learning_rate": 1.4417531718569783e-05, |
|
"loss": 1.0183, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.007498846331333641, |
|
"grad_norm": 0.5915358066558838, |
|
"learning_rate": 1.4994232987312573e-05, |
|
"loss": 1.0469, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.007787263497923396, |
|
"grad_norm": 0.653743326663971, |
|
"learning_rate": 1.5570934256055363e-05, |
|
"loss": 1.0459, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.008075680664513151, |
|
"grad_norm": 0.6202878355979919, |
|
"learning_rate": 1.6147635524798155e-05, |
|
"loss": 1.047, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.008364097831102908, |
|
"grad_norm": 0.6978910565376282, |
|
"learning_rate": 1.6724336793540947e-05, |
|
"loss": 1.0783, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.008652514997692663, |
|
"grad_norm": 0.6709557771682739, |
|
"learning_rate": 1.7301038062283735e-05, |
|
"loss": 1.0576, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.008940932164282418, |
|
"grad_norm": 0.6336321830749512, |
|
"learning_rate": 1.787773933102653e-05, |
|
"loss": 1.0142, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.009229349330872173, |
|
"grad_norm": 0.6070622801780701, |
|
"learning_rate": 1.8454440599769322e-05, |
|
"loss": 0.9873, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.00951776649746193, |
|
"grad_norm": 0.6129010915756226, |
|
"learning_rate": 1.903114186851211e-05, |
|
"loss": 0.9731, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.009806183664051685, |
|
"grad_norm": 0.7349147200584412, |
|
"learning_rate": 1.9607843137254903e-05, |
|
"loss": 1.0242, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.01009460083064144, |
|
"grad_norm": 0.6842703223228455, |
|
"learning_rate": 2.0184544405997694e-05, |
|
"loss": 1.1214, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.010383017997231195, |
|
"grad_norm": 0.7253619432449341, |
|
"learning_rate": 2.0761245674740483e-05, |
|
"loss": 1.0322, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.01067143516382095, |
|
"grad_norm": 0.7551470994949341, |
|
"learning_rate": 2.1337946943483278e-05, |
|
"loss": 0.9613, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.010959852330410707, |
|
"grad_norm": 0.6953349113464355, |
|
"learning_rate": 2.191464821222607e-05, |
|
"loss": 1.075, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.011248269497000462, |
|
"grad_norm": 0.6868691444396973, |
|
"learning_rate": 2.249134948096886e-05, |
|
"loss": 1.0639, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.011536686663590217, |
|
"grad_norm": 0.7638917565345764, |
|
"learning_rate": 2.306805074971165e-05, |
|
"loss": 1.0104, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.011825103830179972, |
|
"grad_norm": 0.6889998316764832, |
|
"learning_rate": 2.3644752018454442e-05, |
|
"loss": 1.0104, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.012113520996769728, |
|
"grad_norm": 0.6709694862365723, |
|
"learning_rate": 2.422145328719723e-05, |
|
"loss": 1.0671, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.012401938163359483, |
|
"grad_norm": 0.7222777605056763, |
|
"learning_rate": 2.4798154555940022e-05, |
|
"loss": 0.9234, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.012690355329949238, |
|
"grad_norm": 0.8851092457771301, |
|
"learning_rate": 2.5374855824682814e-05, |
|
"loss": 1.0463, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.012978772496538993, |
|
"grad_norm": 0.7736419439315796, |
|
"learning_rate": 2.5951557093425606e-05, |
|
"loss": 1.0129, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.01326718966312875, |
|
"grad_norm": 0.853778064250946, |
|
"learning_rate": 2.6528258362168395e-05, |
|
"loss": 1.0253, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.013555606829718505, |
|
"grad_norm": 0.7991467714309692, |
|
"learning_rate": 2.7104959630911193e-05, |
|
"loss": 0.9774, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.01384402399630826, |
|
"grad_norm": 0.718079149723053, |
|
"learning_rate": 2.768166089965398e-05, |
|
"loss": 1.0044, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.014132441162898015, |
|
"grad_norm": 0.7406652569770813, |
|
"learning_rate": 2.8258362168396773e-05, |
|
"loss": 1.0082, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.01442085832948777, |
|
"grad_norm": 0.7190337181091309, |
|
"learning_rate": 2.8835063437139565e-05, |
|
"loss": 0.9858, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.014709275496077527, |
|
"grad_norm": 0.7842580676078796, |
|
"learning_rate": 2.9411764705882354e-05, |
|
"loss": 1.0378, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.014997692662667282, |
|
"grad_norm": 0.7193072438240051, |
|
"learning_rate": 2.9988465974625146e-05, |
|
"loss": 1.0055, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.015286109829257037, |
|
"grad_norm": 0.7311102151870728, |
|
"learning_rate": 3.0565167243367934e-05, |
|
"loss": 0.9497, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.015574526995846792, |
|
"grad_norm": 0.7522133588790894, |
|
"learning_rate": 3.1141868512110726e-05, |
|
"loss": 1.0437, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.015862944162436547, |
|
"grad_norm": 0.7482451796531677, |
|
"learning_rate": 3.171856978085352e-05, |
|
"loss": 0.9474, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.016151361329026302, |
|
"grad_norm": 0.7732366919517517, |
|
"learning_rate": 3.229527104959631e-05, |
|
"loss": 0.9688, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.01643977849561606, |
|
"grad_norm": 0.746648907661438, |
|
"learning_rate": 3.28719723183391e-05, |
|
"loss": 0.914, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.016728195662205816, |
|
"grad_norm": 0.6986602544784546, |
|
"learning_rate": 3.344867358708189e-05, |
|
"loss": 0.9613, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.01701661282879557, |
|
"grad_norm": 0.7043907642364502, |
|
"learning_rate": 3.4025374855824685e-05, |
|
"loss": 0.9693, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.017305029995385326, |
|
"grad_norm": 0.7637396454811096, |
|
"learning_rate": 3.460207612456747e-05, |
|
"loss": 0.9629, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.01759344716197508, |
|
"grad_norm": 0.7434666156768799, |
|
"learning_rate": 3.517877739331027e-05, |
|
"loss": 1.0924, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.017881864328564836, |
|
"grad_norm": 0.7435088157653809, |
|
"learning_rate": 3.575547866205306e-05, |
|
"loss": 1.0479, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.01817028149515459, |
|
"grad_norm": 0.6840139031410217, |
|
"learning_rate": 3.633217993079585e-05, |
|
"loss": 1.0394, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.018458698661744346, |
|
"grad_norm": 0.6994503736495972, |
|
"learning_rate": 3.6908881199538644e-05, |
|
"loss": 1.0005, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.0187471158283341, |
|
"grad_norm": 0.6722626686096191, |
|
"learning_rate": 3.748558246828143e-05, |
|
"loss": 0.9682, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.01903553299492386, |
|
"grad_norm": 0.6701967716217041, |
|
"learning_rate": 3.806228373702422e-05, |
|
"loss": 0.9973, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.019323950161513614, |
|
"grad_norm": 0.658715546131134, |
|
"learning_rate": 3.863898500576701e-05, |
|
"loss": 0.9848, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.01961236732810337, |
|
"grad_norm": 0.7802160382270813, |
|
"learning_rate": 3.9215686274509805e-05, |
|
"loss": 0.9661, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.019900784494693124, |
|
"grad_norm": 0.8040672540664673, |
|
"learning_rate": 3.97923875432526e-05, |
|
"loss": 0.9959, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.02018920166128288, |
|
"grad_norm": 0.687023937702179, |
|
"learning_rate": 4.036908881199539e-05, |
|
"loss": 0.9384, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.020477618827872635, |
|
"grad_norm": 0.6872786283493042, |
|
"learning_rate": 4.094579008073818e-05, |
|
"loss": 0.9794, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.02076603599446239, |
|
"grad_norm": 0.6797603368759155, |
|
"learning_rate": 4.1522491349480966e-05, |
|
"loss": 1.013, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.021054453161052145, |
|
"grad_norm": 0.6620325446128845, |
|
"learning_rate": 4.209919261822376e-05, |
|
"loss": 0.9782, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.0213428703276419, |
|
"grad_norm": 0.7120839357376099, |
|
"learning_rate": 4.2675893886966556e-05, |
|
"loss": 1.001, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.021631287494231658, |
|
"grad_norm": 0.7110047936439514, |
|
"learning_rate": 4.325259515570935e-05, |
|
"loss": 1.0208, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.021919704660821413, |
|
"grad_norm": 0.674694299697876, |
|
"learning_rate": 4.382929642445214e-05, |
|
"loss": 0.976, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.022208121827411168, |
|
"grad_norm": 0.6173574924468994, |
|
"learning_rate": 4.440599769319493e-05, |
|
"loss": 0.8755, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.022496538994000923, |
|
"grad_norm": 0.6821759343147278, |
|
"learning_rate": 4.498269896193772e-05, |
|
"loss": 1.0689, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.022784956160590678, |
|
"grad_norm": 0.709820568561554, |
|
"learning_rate": 4.555940023068051e-05, |
|
"loss": 1.0277, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.023073373327180433, |
|
"grad_norm": 0.6610621809959412, |
|
"learning_rate": 4.61361014994233e-05, |
|
"loss": 1.0061, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.023361790493770188, |
|
"grad_norm": 0.7234196662902832, |
|
"learning_rate": 4.671280276816609e-05, |
|
"loss": 1.0004, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.023650207660359943, |
|
"grad_norm": 0.7094035744667053, |
|
"learning_rate": 4.7289504036908884e-05, |
|
"loss": 0.9914, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.0239386248269497, |
|
"grad_norm": 0.6732367873191833, |
|
"learning_rate": 4.7866205305651676e-05, |
|
"loss": 0.9613, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.024227041993539457, |
|
"grad_norm": 0.6192376613616943, |
|
"learning_rate": 4.844290657439446e-05, |
|
"loss": 1.0058, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.024515459160129212, |
|
"grad_norm": 0.6171934604644775, |
|
"learning_rate": 4.901960784313725e-05, |
|
"loss": 1.027, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.024803876326718967, |
|
"grad_norm": 0.6176871657371521, |
|
"learning_rate": 4.9596309111880045e-05, |
|
"loss": 0.9524, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.025092293493308722, |
|
"grad_norm": 0.6524646878242493, |
|
"learning_rate": 5.017301038062284e-05, |
|
"loss": 1.0202, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.025380710659898477, |
|
"grad_norm": 0.6370823979377747, |
|
"learning_rate": 5.074971164936563e-05, |
|
"loss": 0.9185, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.025669127826488232, |
|
"grad_norm": 0.6290232539176941, |
|
"learning_rate": 5.132641291810843e-05, |
|
"loss": 1.0323, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.025957544993077987, |
|
"grad_norm": 0.6379473805427551, |
|
"learning_rate": 5.190311418685121e-05, |
|
"loss": 0.9921, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.026245962159667742, |
|
"grad_norm": 0.6444252729415894, |
|
"learning_rate": 5.2479815455594004e-05, |
|
"loss": 1.0232, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.0265343793262575, |
|
"grad_norm": 0.6765018105506897, |
|
"learning_rate": 5.305651672433679e-05, |
|
"loss": 0.9987, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.026822796492847256, |
|
"grad_norm": 0.6089096069335938, |
|
"learning_rate": 5.363321799307959e-05, |
|
"loss": 0.9996, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.02711121365943701, |
|
"grad_norm": 0.6402391195297241, |
|
"learning_rate": 5.4209919261822386e-05, |
|
"loss": 0.9029, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.027399630826026766, |
|
"grad_norm": 0.616694450378418, |
|
"learning_rate": 5.478662053056517e-05, |
|
"loss": 0.9625, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.02768804799261652, |
|
"grad_norm": 0.6394984126091003, |
|
"learning_rate": 5.536332179930796e-05, |
|
"loss": 0.984, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.027976465159206276, |
|
"grad_norm": 0.6072252988815308, |
|
"learning_rate": 5.594002306805075e-05, |
|
"loss": 0.9278, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.02826488232579603, |
|
"grad_norm": 0.6287209987640381, |
|
"learning_rate": 5.651672433679355e-05, |
|
"loss": 1.011, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.028553299492385786, |
|
"grad_norm": 0.610227644443512, |
|
"learning_rate": 5.709342560553633e-05, |
|
"loss": 0.9999, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.02884171665897554, |
|
"grad_norm": 0.5862972736358643, |
|
"learning_rate": 5.767012687427913e-05, |
|
"loss": 0.9724, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.08738536078906385, |
|
"grad_norm": 0.5334975719451904, |
|
"learning_rate": 0.00017474048442906573, |
|
"loss": 0.9612, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.08825056238103478, |
|
"grad_norm": 0.4906371831893921, |
|
"learning_rate": 0.00017647058823529413, |
|
"loss": 1.0106, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.0891157639730057, |
|
"grad_norm": 0.6553336977958679, |
|
"learning_rate": 0.0001782006920415225, |
|
"loss": 1.0372, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.08998096556497664, |
|
"grad_norm": 0.4532735347747803, |
|
"learning_rate": 0.00017993079584775087, |
|
"loss": 1.0166, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.09084616715694757, |
|
"grad_norm": 0.474738210439682, |
|
"learning_rate": 0.00018166089965397926, |
|
"loss": 0.9979, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.0917113687489185, |
|
"grad_norm": 0.4620087146759033, |
|
"learning_rate": 0.0001833910034602076, |
|
"loss": 1.0254, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.09257657034088942, |
|
"grad_norm": 0.4435741901397705, |
|
"learning_rate": 0.000185121107266436, |
|
"loss": 0.9888, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.09344177193286035, |
|
"grad_norm": 0.43698352575302124, |
|
"learning_rate": 0.00018685121107266437, |
|
"loss": 0.9884, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.09430697352483129, |
|
"grad_norm": 0.41308143734931946, |
|
"learning_rate": 0.00018858131487889274, |
|
"loss": 1.0003, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.09517217511680222, |
|
"grad_norm": 0.4729122817516327, |
|
"learning_rate": 0.00019031141868512113, |
|
"loss": 0.9812, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.09603737670877315, |
|
"grad_norm": 0.42877742648124695, |
|
"learning_rate": 0.00019204152249134948, |
|
"loss": 0.9832, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.09690257830074407, |
|
"grad_norm": 0.4206876754760742, |
|
"learning_rate": 0.00019377162629757784, |
|
"loss": 1.009, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.097767779892715, |
|
"grad_norm": 0.4647689461708069, |
|
"learning_rate": 0.00019550173010380624, |
|
"loss": 0.945, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.09863298148468594, |
|
"grad_norm": 0.41559451818466187, |
|
"learning_rate": 0.0001972318339100346, |
|
"loss": 1.0342, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.09949818307665687, |
|
"grad_norm": 0.44591063261032104, |
|
"learning_rate": 0.000198961937716263, |
|
"loss": 0.9927, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.10036338466862779, |
|
"grad_norm": 0.41210636496543884, |
|
"learning_rate": 0.00019999992702804517, |
|
"loss": 0.9835, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.10122858626059872, |
|
"grad_norm": 0.4488976001739502, |
|
"learning_rate": 0.0001999991060947763, |
|
"loss": 0.9783, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.10209378785256965, |
|
"grad_norm": 0.49996423721313477, |
|
"learning_rate": 0.0001999973730208081, |
|
"loss": 1.0076, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.10295898944454057, |
|
"grad_norm": 0.3953074812889099, |
|
"learning_rate": 0.00019999472782194876, |
|
"loss": 0.9747, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.10382419103651151, |
|
"grad_norm": 0.42277634143829346, |
|
"learning_rate": 0.0001999911705223265, |
|
"loss": 0.9959, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.10468939262848244, |
|
"grad_norm": 0.4215725064277649, |
|
"learning_rate": 0.00019998670115438909, |
|
"loss": 1.0212, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.10555459422045337, |
|
"grad_norm": 0.4542864263057709, |
|
"learning_rate": 0.0001999813197589039, |
|
"loss": 0.9818, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.10641979581242429, |
|
"grad_norm": 0.4049575626850128, |
|
"learning_rate": 0.00019997502638495723, |
|
"loss": 0.9868, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.10728499740439522, |
|
"grad_norm": 0.4267943799495697, |
|
"learning_rate": 0.00019996782108995404, |
|
"loss": 1.0023, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.10815019899636616, |
|
"grad_norm": 0.3867630660533905, |
|
"learning_rate": 0.00019995970393961733, |
|
"loss": 0.9584, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.10901540058833709, |
|
"grad_norm": 0.4118204116821289, |
|
"learning_rate": 0.00019995067500798758, |
|
"loss": 1.0091, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.10988060218030801, |
|
"grad_norm": 0.4244009852409363, |
|
"learning_rate": 0.00019994073437742214, |
|
"loss": 0.9887, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.11074580377227894, |
|
"grad_norm": 0.37798789143562317, |
|
"learning_rate": 0.0001999298821385943, |
|
"loss": 0.9533, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.11161100536424987, |
|
"grad_norm": 0.4218810796737671, |
|
"learning_rate": 0.00019991811839049263, |
|
"loss": 0.9813, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.1124762069562208, |
|
"grad_norm": 0.419813871383667, |
|
"learning_rate": 0.00019990544324042007, |
|
"loss": 0.9898, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.11334140854819173, |
|
"grad_norm": 0.4155246615409851, |
|
"learning_rate": 0.00019989185680399283, |
|
"loss": 1.0018, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.11420661014016266, |
|
"grad_norm": 0.45885682106018066, |
|
"learning_rate": 0.00019987735920513943, |
|
"loss": 1.0023, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.11507181173213359, |
|
"grad_norm": 0.45299044251441956, |
|
"learning_rate": 0.00019986195057609957, |
|
"loss": 1.0061, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.11593701332410451, |
|
"grad_norm": 0.4102267920970917, |
|
"learning_rate": 0.00019984563105742285, |
|
"loss": 1.0284, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.11680221491607544, |
|
"grad_norm": 0.41383153200149536, |
|
"learning_rate": 0.0001998284007979676, |
|
"loss": 0.9999, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.11766741650804638, |
|
"grad_norm": 0.4205871522426605, |
|
"learning_rate": 0.00019981025995489943, |
|
"loss": 0.9574, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.1185326181000173, |
|
"grad_norm": 0.43072691559791565, |
|
"learning_rate": 0.00019979120869368982, |
|
"loss": 0.9703, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.11939781969198823, |
|
"grad_norm": 0.4149424135684967, |
|
"learning_rate": 0.00019977124718811463, |
|
"loss": 1.0093, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.12026302128395916, |
|
"grad_norm": 0.46796900033950806, |
|
"learning_rate": 0.00019975037562025254, |
|
"loss": 0.9802, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.12112822287593009, |
|
"grad_norm": 0.3972123861312866, |
|
"learning_rate": 0.00019972859418048328, |
|
"loss": 1.0109, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.12199342446790103, |
|
"grad_norm": 0.4356140196323395, |
|
"learning_rate": 0.00019970590306748603, |
|
"loss": 1.0295, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.12285862605987195, |
|
"grad_norm": 0.43844765424728394, |
|
"learning_rate": 0.00019968230248823746, |
|
"loss": 0.9474, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.12372382765184288, |
|
"grad_norm": 0.42332398891448975, |
|
"learning_rate": 0.00019965779265801006, |
|
"loss": 1.0008, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.1245890292438138, |
|
"grad_norm": 0.4387184679508209, |
|
"learning_rate": 0.00019963237380036993, |
|
"loss": 0.9893, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.12545423083578475, |
|
"grad_norm": 0.42322424054145813, |
|
"learning_rate": 0.00019960604614717485, |
|
"loss": 0.9831, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.12631943242775567, |
|
"grad_norm": 0.41627830266952515, |
|
"learning_rate": 0.00019957880993857227, |
|
"loss": 1.006, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.1271846340197266, |
|
"grad_norm": 0.4279492199420929, |
|
"learning_rate": 0.00019955066542299695, |
|
"loss": 0.9847, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.12804983561169753, |
|
"grad_norm": 0.43340563774108887, |
|
"learning_rate": 0.00019952161285716872, |
|
"loss": 0.9798, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.12891503720366845, |
|
"grad_norm": 0.4125729501247406, |
|
"learning_rate": 0.00019949165250609022, |
|
"loss": 0.976, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.12978023879563938, |
|
"grad_norm": 0.43918147683143616, |
|
"learning_rate": 0.0001994607846430445, |
|
"loss": 0.9614, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.1306454403876103, |
|
"grad_norm": 0.43273496627807617, |
|
"learning_rate": 0.00019942900954959244, |
|
"loss": 1.0095, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.13151064197958123, |
|
"grad_norm": 0.43806779384613037, |
|
"learning_rate": 0.0001993963275155701, |
|
"loss": 0.9866, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.13237584357155216, |
|
"grad_norm": 0.43065160512924194, |
|
"learning_rate": 0.00019936273883908637, |
|
"loss": 0.9964, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.13324104516352311, |
|
"grad_norm": 0.41607436537742615, |
|
"learning_rate": 0.0001993282438265199, |
|
"loss": 0.9926, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.13410624675549404, |
|
"grad_norm": 0.419233113527298, |
|
"learning_rate": 0.00019929284279251658, |
|
"loss": 0.995, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.13497144834746497, |
|
"grad_norm": 0.41398414969444275, |
|
"learning_rate": 0.00019925653605998655, |
|
"loss": 0.983, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.1358366499394359, |
|
"grad_norm": 0.43536847829818726, |
|
"learning_rate": 0.00019921932396010122, |
|
"loss": 0.9669, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.13670185153140682, |
|
"grad_norm": 0.41906699538230896, |
|
"learning_rate": 0.00019918120683229043, |
|
"loss": 0.9839, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.13756705312337775, |
|
"grad_norm": 0.47571635246276855, |
|
"learning_rate": 0.00019914218502423898, |
|
"loss": 1.0062, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.13843225471534867, |
|
"grad_norm": 0.4490084648132324, |
|
"learning_rate": 0.00019910225889188397, |
|
"loss": 1.0186, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.1392974563073196, |
|
"grad_norm": 0.43234339356422424, |
|
"learning_rate": 0.00019906142879941107, |
|
"loss": 0.973, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.14016265789929053, |
|
"grad_norm": 0.42632317543029785, |
|
"learning_rate": 0.00019901969511925153, |
|
"loss": 0.9917, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.14102785949126145, |
|
"grad_norm": 0.4437479078769684, |
|
"learning_rate": 0.00019897705823207867, |
|
"loss": 0.9794, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.14189306108323238, |
|
"grad_norm": 0.416469931602478, |
|
"learning_rate": 0.00019893351852680433, |
|
"loss": 0.9611, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.14275826267520333, |
|
"grad_norm": 0.4988054037094116, |
|
"learning_rate": 0.00019888907640057543, |
|
"loss": 0.9644, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.14362346426717426, |
|
"grad_norm": 0.44064489006996155, |
|
"learning_rate": 0.0001988437322587703, |
|
"loss": 0.9768, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.1444886658591452, |
|
"grad_norm": 0.43101176619529724, |
|
"learning_rate": 0.00019879748651499508, |
|
"loss": 1.0167, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.14535386745111611, |
|
"grad_norm": 0.4331214725971222, |
|
"learning_rate": 0.0001987503395910797, |
|
"loss": 0.9585, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.14621906904308704, |
|
"grad_norm": 0.4513319730758667, |
|
"learning_rate": 0.00019870229191707437, |
|
"loss": 0.9678, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.14708427063505797, |
|
"grad_norm": 0.43121349811553955, |
|
"learning_rate": 0.00019865334393124535, |
|
"loss": 0.9701, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.1479494722270289, |
|
"grad_norm": 0.4441729485988617, |
|
"learning_rate": 0.00019860349608007111, |
|
"loss": 0.9771, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.14881467381899982, |
|
"grad_norm": 0.447622686624527, |
|
"learning_rate": 0.00019855274881823833, |
|
"loss": 0.9707, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.14967987541097075, |
|
"grad_norm": 0.439892441034317, |
|
"learning_rate": 0.0001985011026086375, |
|
"loss": 0.945, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.15054507700294167, |
|
"grad_norm": 0.45804309844970703, |
|
"learning_rate": 0.00019844855792235897, |
|
"loss": 1.0298, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.15141027859491263, |
|
"grad_norm": 0.427182674407959, |
|
"learning_rate": 0.00019839511523868847, |
|
"loss": 0.9811, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.15227548018688356, |
|
"grad_norm": 0.397521436214447, |
|
"learning_rate": 0.00019834077504510283, |
|
"loss": 0.9782, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.15314068177885448, |
|
"grad_norm": 0.44817712903022766, |
|
"learning_rate": 0.00019828553783726553, |
|
"loss": 1.0242, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.1540058833708254, |
|
"grad_norm": 0.4743613302707672, |
|
"learning_rate": 0.00019822940411902212, |
|
"loss": 0.9646, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.15487108496279633, |
|
"grad_norm": 0.4440392255783081, |
|
"learning_rate": 0.0001981723744023956, |
|
"loss": 1.013, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.15573628655476726, |
|
"grad_norm": 0.4056445062160492, |
|
"learning_rate": 0.000198114449207582, |
|
"loss": 0.9629, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.1566014881467382, |
|
"grad_norm": 0.44350746273994446, |
|
"learning_rate": 0.0001980556290629452, |
|
"loss": 0.944, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.15746668973870911, |
|
"grad_norm": 0.4516960382461548, |
|
"learning_rate": 0.00019799591450501253, |
|
"loss": 0.9772, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.15833189133068004, |
|
"grad_norm": 0.4214520752429962, |
|
"learning_rate": 0.00019793530607846967, |
|
"loss": 0.959, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.15919709292265097, |
|
"grad_norm": 0.4131547212600708, |
|
"learning_rate": 0.00019787380433615562, |
|
"loss": 0.9922, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.1600622945146219, |
|
"grad_norm": 0.43274828791618347, |
|
"learning_rate": 0.0001978114098390579, |
|
"loss": 0.9896, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.16092749610659285, |
|
"grad_norm": 0.41580265760421753, |
|
"learning_rate": 0.0001977481231563071, |
|
"loss": 0.9829, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.16179269769856378, |
|
"grad_norm": 0.4266924560070038, |
|
"learning_rate": 0.0001976839448651721, |
|
"loss": 1.0039, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.1626578992905347, |
|
"grad_norm": 0.4301856756210327, |
|
"learning_rate": 0.00019761887555105428, |
|
"loss": 0.9754, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.16352310088250563, |
|
"grad_norm": 0.4580918848514557, |
|
"learning_rate": 0.00019755291580748278, |
|
"loss": 0.9878, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.16438830247447656, |
|
"grad_norm": 0.42018818855285645, |
|
"learning_rate": 0.0001974860662361086, |
|
"loss": 1.0021, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.16525350406644748, |
|
"grad_norm": 0.44303232431411743, |
|
"learning_rate": 0.00019741832744669938, |
|
"loss": 0.9844, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.1661187056584184, |
|
"grad_norm": 0.4127476215362549, |
|
"learning_rate": 0.0001973497000571336, |
|
"loss": 0.9969, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.16698390725038934, |
|
"grad_norm": 0.413120299577713, |
|
"learning_rate": 0.00019728018469339532, |
|
"loss": 1.0029, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.16784910884236026, |
|
"grad_norm": 0.41281285881996155, |
|
"learning_rate": 0.00019720978198956807, |
|
"loss": 0.9744, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.1687143104343312, |
|
"grad_norm": 0.42970991134643555, |
|
"learning_rate": 0.00019713849258782933, |
|
"loss": 0.9722, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.16957951202630211, |
|
"grad_norm": 0.4328010678291321, |
|
"learning_rate": 0.00019706631713844455, |
|
"loss": 0.9985, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.17044471361827307, |
|
"grad_norm": 0.42618030309677124, |
|
"learning_rate": 0.00019699325629976127, |
|
"loss": 0.9983, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.171309915210244, |
|
"grad_norm": 0.44706591963768005, |
|
"learning_rate": 0.00019691931073820312, |
|
"loss": 1.0262, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.17217511680221492, |
|
"grad_norm": 0.41572490334510803, |
|
"learning_rate": 0.00019684448112826361, |
|
"loss": 0.9898, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.17304031839418585, |
|
"grad_norm": 0.4546913206577301, |
|
"learning_rate": 0.00019676876815250024, |
|
"loss": 0.9845, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.17390551998615678, |
|
"grad_norm": 0.41583436727523804, |
|
"learning_rate": 0.00019669217250152806, |
|
"loss": 0.9786, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.1747707215781277, |
|
"grad_norm": 0.4279019236564636, |
|
"learning_rate": 0.00019661469487401335, |
|
"loss": 0.9573, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.17563592317009863, |
|
"grad_norm": 0.4380638003349304, |
|
"learning_rate": 0.00019653633597666744, |
|
"loss": 0.9666, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.17650112476206956, |
|
"grad_norm": 0.4483678638935089, |
|
"learning_rate": 0.00019645709652424016, |
|
"loss": 1.0032, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.17736632635404048, |
|
"grad_norm": 0.4582922160625458, |
|
"learning_rate": 0.0001963769772395132, |
|
"loss": 1.0026, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.1782315279460114, |
|
"grad_norm": 0.39407840371131897, |
|
"learning_rate": 0.00019629597885329373, |
|
"loss": 0.9871, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.17909672953798236, |
|
"grad_norm": 0.4242671728134155, |
|
"learning_rate": 0.00019621410210440759, |
|
"loss": 1.006, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.1799619311299533, |
|
"grad_norm": 0.4255340099334717, |
|
"learning_rate": 0.00019613134773969256, |
|
"loss": 1.036, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.18082713272192422, |
|
"grad_norm": 0.4595358967781067, |
|
"learning_rate": 0.00019604771651399175, |
|
"loss": 0.9635, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.18169233431389514, |
|
"grad_norm": 0.4306567907333374, |
|
"learning_rate": 0.0001959632091901463, |
|
"loss": 0.9313, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.18255753590586607, |
|
"grad_norm": 0.4518164396286011, |
|
"learning_rate": 0.00019587782653898884, |
|
"loss": 1.0311, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.183422737497837, |
|
"grad_norm": 0.41974586248397827, |
|
"learning_rate": 0.00019579156933933632, |
|
"loss": 0.9976, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.18428793908980792, |
|
"grad_norm": 0.4630170464515686, |
|
"learning_rate": 0.00019570443837798265, |
|
"loss": 0.9915, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.18515314068177885, |
|
"grad_norm": 0.4572630524635315, |
|
"learning_rate": 0.00019561643444969203, |
|
"loss": 1.0228, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.18601834227374978, |
|
"grad_norm": 0.46339964866638184, |
|
"learning_rate": 0.00019552755835719116, |
|
"loss": 0.9616, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.1868835438657207, |
|
"grad_norm": 0.43660199642181396, |
|
"learning_rate": 0.00019543781091116243, |
|
"loss": 0.9879, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.18774874545769163, |
|
"grad_norm": 0.4161589741706848, |
|
"learning_rate": 0.00019534719293023604, |
|
"loss": 0.9961, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.18861394704966258, |
|
"grad_norm": 0.4922807216644287, |
|
"learning_rate": 0.00019525570524098288, |
|
"loss": 1.0022, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.1894791486416335, |
|
"grad_norm": 0.4476989805698395, |
|
"learning_rate": 0.0001951633486779069, |
|
"loss": 1.007, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.19034435023360444, |
|
"grad_norm": 0.4585910141468048, |
|
"learning_rate": 0.0001950701240834374, |
|
"loss": 0.9368, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.19120955182557536, |
|
"grad_norm": 0.440684050321579, |
|
"learning_rate": 0.00019497603230792145, |
|
"loss": 1.0081, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.1920747534175463, |
|
"grad_norm": 0.41014137864112854, |
|
"learning_rate": 0.00019488107420961612, |
|
"loss": 0.971, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.19293995500951722, |
|
"grad_norm": 0.4705316126346588, |
|
"learning_rate": 0.00019478525065468058, |
|
"loss": 0.9501, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.19380515660148814, |
|
"grad_norm": 0.4233025014400482, |
|
"learning_rate": 0.00019468856251716833, |
|
"loss": 1.0, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.19467035819345907, |
|
"grad_norm": 0.47275301814079285, |
|
"learning_rate": 0.00019459101067901906, |
|
"loss": 1.0192, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.19553555978543, |
|
"grad_norm": 0.47677910327911377, |
|
"learning_rate": 0.0001944925960300508, |
|
"loss": 0.9638, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.19640076137740092, |
|
"grad_norm": 0.4414370656013489, |
|
"learning_rate": 0.00019439331946795158, |
|
"loss": 0.9603, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.19726596296937188, |
|
"grad_norm": 0.43756040930747986, |
|
"learning_rate": 0.0001942931818982715, |
|
"loss": 1.0019, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.1981311645613428, |
|
"grad_norm": 0.434892863035202, |
|
"learning_rate": 0.00019419218423441414, |
|
"loss": 0.9573, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.19899636615331373, |
|
"grad_norm": 0.43879738450050354, |
|
"learning_rate": 0.00019409032739762863, |
|
"loss": 0.9556, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.19986156774528466, |
|
"grad_norm": 0.4677106738090515, |
|
"learning_rate": 0.00019398761231700088, |
|
"loss": 1.026, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.20072676933725558, |
|
"grad_norm": 0.44820311665534973, |
|
"learning_rate": 0.0001938840399294454, |
|
"loss": 0.9921, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.2015919709292265, |
|
"grad_norm": 0.4272112250328064, |
|
"learning_rate": 0.00019377961117969644, |
|
"loss": 0.9864, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.20245717252119744, |
|
"grad_norm": 0.44302093982696533, |
|
"learning_rate": 0.0001936743270202997, |
|
"loss": 0.9945, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.20332237411316836, |
|
"grad_norm": 0.43296101689338684, |
|
"learning_rate": 0.0001935681884116034, |
|
"loss": 0.9394, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.2041875757051393, |
|
"grad_norm": 0.4531594514846802, |
|
"learning_rate": 0.00019346119632174968, |
|
"loss": 1.0248, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.20505277729711022, |
|
"grad_norm": 0.4667482078075409, |
|
"learning_rate": 0.00019335335172666565, |
|
"loss": 0.944, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.20591797888908114, |
|
"grad_norm": 0.41053980588912964, |
|
"learning_rate": 0.00019324465561005452, |
|
"loss": 0.9948, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.2067831804810521, |
|
"grad_norm": 0.4748595952987671, |
|
"learning_rate": 0.0001931351089633867, |
|
"loss": 0.9561, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.20764838207302302, |
|
"grad_norm": 0.45077425241470337, |
|
"learning_rate": 0.00019302471278589061, |
|
"loss": 0.9711, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.20851358366499395, |
|
"grad_norm": 0.4596273899078369, |
|
"learning_rate": 0.00019291346808454382, |
|
"loss": 0.9931, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.20937878525696488, |
|
"grad_norm": 0.4171757102012634, |
|
"learning_rate": 0.00019280137587406352, |
|
"loss": 0.9529, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.2102439868489358, |
|
"grad_norm": 0.43460047245025635, |
|
"learning_rate": 0.00019268843717689754, |
|
"loss": 0.9599, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.21110918844090673, |
|
"grad_norm": 0.44364187121391296, |
|
"learning_rate": 0.00019257465302321495, |
|
"loss": 0.973, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.21197439003287766, |
|
"grad_norm": 0.4260512590408325, |
|
"learning_rate": 0.00019246002445089656, |
|
"loss": 0.9951, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.21283959162484858, |
|
"grad_norm": 0.47502508759498596, |
|
"learning_rate": 0.00019234455250552554, |
|
"loss": 1.0132, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.2137047932168195, |
|
"grad_norm": 0.43591979146003723, |
|
"learning_rate": 0.00019222823824037804, |
|
"loss": 0.9455, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.21456999480879044, |
|
"grad_norm": 0.42019540071487427, |
|
"learning_rate": 0.00019211108271641316, |
|
"loss": 1.009, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.21543519640076136, |
|
"grad_norm": 0.4755167067050934, |
|
"learning_rate": 0.00019199308700226374, |
|
"loss": 1.0004, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.21630039799273232, |
|
"grad_norm": 0.4237242639064789, |
|
"learning_rate": 0.00019187425217422632, |
|
"loss": 0.9806, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.21716559958470324, |
|
"grad_norm": 0.4744568169116974, |
|
"learning_rate": 0.0001917545793162514, |
|
"loss": 0.9841, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.21803080117667417, |
|
"grad_norm": 0.4334425628185272, |
|
"learning_rate": 0.00019163406951993363, |
|
"loss": 0.9809, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.2188960027686451, |
|
"grad_norm": 0.419150710105896, |
|
"learning_rate": 0.00019151272388450173, |
|
"loss": 0.9872, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.21976120436061602, |
|
"grad_norm": 0.4238300621509552, |
|
"learning_rate": 0.00019139054351680846, |
|
"loss": 0.9613, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.22062640595258695, |
|
"grad_norm": 0.4214048385620117, |
|
"learning_rate": 0.00019126752953132069, |
|
"loss": 0.9791, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.22149160754455788, |
|
"grad_norm": 0.4584942162036896, |
|
"learning_rate": 0.00019114368305010905, |
|
"loss": 1.0075, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.2223568091365288, |
|
"grad_norm": 0.435167133808136, |
|
"learning_rate": 0.00019101900520283784, |
|
"loss": 0.9987, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.22322201072849973, |
|
"grad_norm": 0.43369898200035095, |
|
"learning_rate": 0.0001908934971267546, |
|
"loss": 0.9703, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.22408721232047066, |
|
"grad_norm": 0.452718585729599, |
|
"learning_rate": 0.00019076715996667977, |
|
"loss": 0.9758, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.2249524139124416, |
|
"grad_norm": 0.43349212408065796, |
|
"learning_rate": 0.00019063999487499637, |
|
"loss": 0.9425, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.22581761550441254, |
|
"grad_norm": 0.5821581482887268, |
|
"learning_rate": 0.00019051200301163922, |
|
"loss": 1.0269, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.22668281709638347, |
|
"grad_norm": 0.4367370307445526, |
|
"learning_rate": 0.00019038318554408479, |
|
"loss": 0.981, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.2275480186883544, |
|
"grad_norm": 0.43077537417411804, |
|
"learning_rate": 0.00019025354364734, |
|
"loss": 0.9795, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.22841322028032532, |
|
"grad_norm": 0.43558841943740845, |
|
"learning_rate": 0.00019012307850393197, |
|
"loss": 0.9835, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.22927842187229625, |
|
"grad_norm": 0.4512316882610321, |
|
"learning_rate": 0.000189991791303897, |
|
"loss": 1.0004, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.23014362346426717, |
|
"grad_norm": 0.4050248861312866, |
|
"learning_rate": 0.0001898596832447698, |
|
"loss": 0.9768, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.2310088250562381, |
|
"grad_norm": 0.45898815989494324, |
|
"learning_rate": 0.00018972675553157253, |
|
"loss": 1.0172, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.23187402664820903, |
|
"grad_norm": 0.4515649676322937, |
|
"learning_rate": 0.00018959300937680372, |
|
"loss": 1.0098, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.23273922824017995, |
|
"grad_norm": 0.42254117131233215, |
|
"learning_rate": 0.00018945844600042747, |
|
"loss": 0.9975, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.23360442983215088, |
|
"grad_norm": 0.4825042486190796, |
|
"learning_rate": 0.00018932306662986209, |
|
"loss": 1.008, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.23446963142412183, |
|
"grad_norm": 0.4267342686653137, |
|
"learning_rate": 0.00018918687249996888, |
|
"loss": 1.0035, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.23533483301609276, |
|
"grad_norm": 0.4438179135322571, |
|
"learning_rate": 0.00018904986485304104, |
|
"loss": 0.9971, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.23620003460806369, |
|
"grad_norm": 0.4820108413696289, |
|
"learning_rate": 0.0001889120449387923, |
|
"loss": 0.9482, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.2370652362000346, |
|
"grad_norm": 0.5275493264198303, |
|
"learning_rate": 0.00018877341401434542, |
|
"loss": 0.978, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.23793043779200554, |
|
"grad_norm": 0.40757259726524353, |
|
"learning_rate": 0.00018863397334422074, |
|
"loss": 1.0092, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.23879563938397647, |
|
"grad_norm": 0.5136415362358093, |
|
"learning_rate": 0.00018849372420032482, |
|
"loss": 0.9226, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.2396608409759474, |
|
"grad_norm": 0.41990041732788086, |
|
"learning_rate": 0.0001883526678619385, |
|
"loss": 1.0081, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.24052604256791832, |
|
"grad_norm": 0.42340949177742004, |
|
"learning_rate": 0.00018821080561570564, |
|
"loss": 0.9498, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.24139124415988925, |
|
"grad_norm": 0.4153204560279846, |
|
"learning_rate": 0.00018806813875562106, |
|
"loss": 0.9773, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.24225644575186017, |
|
"grad_norm": 0.4485369622707367, |
|
"learning_rate": 0.0001879246685830189, |
|
"loss": 0.9816, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.24312164734383113, |
|
"grad_norm": 0.41648054122924805, |
|
"learning_rate": 0.0001877803964065607, |
|
"loss": 0.9895, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.24398684893580205, |
|
"grad_norm": 0.45885545015335083, |
|
"learning_rate": 0.0001876353235422234, |
|
"loss": 0.9631, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.24485205052777298, |
|
"grad_norm": 0.4401138722896576, |
|
"learning_rate": 0.0001874894513132876, |
|
"loss": 0.9606, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.2457172521197439, |
|
"grad_norm": 0.46568748354911804, |
|
"learning_rate": 0.00018734278105032504, |
|
"loss": 0.962, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.24658245371171483, |
|
"grad_norm": 0.4546853005886078, |
|
"learning_rate": 0.0001871953140911869, |
|
"loss": 0.9967, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.24744765530368576, |
|
"grad_norm": 0.5038758516311646, |
|
"learning_rate": 0.0001870470517809913, |
|
"loss": 0.9848, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.2483128568956567, |
|
"grad_norm": 0.44099846482276917, |
|
"learning_rate": 0.0001868979954721113, |
|
"loss": 1.0159, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.2491780584876276, |
|
"grad_norm": 0.4379419684410095, |
|
"learning_rate": 0.0001867481465241622, |
|
"loss": 0.9912, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.25004326007959854, |
|
"grad_norm": 0.4447334408760071, |
|
"learning_rate": 0.00018659750630398953, |
|
"loss": 0.9705, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.2509084616715695, |
|
"grad_norm": 0.4537925720214844, |
|
"learning_rate": 0.00018644607618565636, |
|
"loss": 1.0205, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.2517736632635404, |
|
"grad_norm": 0.4481840133666992, |
|
"learning_rate": 0.00018629385755043068, |
|
"loss": 0.9782, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.25263886485551135, |
|
"grad_norm": 0.467529833316803, |
|
"learning_rate": 0.0001861408517867731, |
|
"loss": 0.9552, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.25350406644748225, |
|
"grad_norm": 0.449776291847229, |
|
"learning_rate": 0.0001859870602903239, |
|
"loss": 0.9672, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.2543692680394532, |
|
"grad_norm": 0.6294044256210327, |
|
"learning_rate": 0.0001858324844638905, |
|
"loss": 0.927, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.2552344696314241, |
|
"grad_norm": 0.5393489599227905, |
|
"learning_rate": 0.0001856771257174345, |
|
"loss": 0.9396, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.25609967122339505, |
|
"grad_norm": 0.44971466064453125, |
|
"learning_rate": 0.00018552098546805888, |
|
"loss": 0.9652, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.256964872815366, |
|
"grad_norm": 0.44472429156303406, |
|
"learning_rate": 0.0001853640651399952, |
|
"loss": 1.0073, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.2578300744073369, |
|
"grad_norm": 0.47125008702278137, |
|
"learning_rate": 0.00018520636616459036, |
|
"loss": 1.0136, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.25869527599930786, |
|
"grad_norm": 0.45552554726600647, |
|
"learning_rate": 0.00018504788998029387, |
|
"loss": 0.9856, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.25956047759127876, |
|
"grad_norm": 0.4563223421573639, |
|
"learning_rate": 0.00018488863803264432, |
|
"loss": 0.9882, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.2604256791832497, |
|
"grad_norm": 0.48147350549697876, |
|
"learning_rate": 0.00018472861177425655, |
|
"loss": 0.9911, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.2612908807752206, |
|
"grad_norm": 0.43628785014152527, |
|
"learning_rate": 0.0001845678126648083, |
|
"loss": 0.9816, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.26215608236719157, |
|
"grad_norm": 0.4450732171535492, |
|
"learning_rate": 0.00018440624217102674, |
|
"loss": 0.9672, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.26302128395916247, |
|
"grad_norm": 0.45051246881484985, |
|
"learning_rate": 0.00018424390176667528, |
|
"loss": 0.9984, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.2638864855511334, |
|
"grad_norm": 0.4232747554779053, |
|
"learning_rate": 0.00018408079293254006, |
|
"loss": 0.9948, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.2647516871431043, |
|
"grad_norm": 0.4707663059234619, |
|
"learning_rate": 0.0001839169171564165, |
|
"loss": 0.9916, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.2656168887350753, |
|
"grad_norm": 0.46704453229904175, |
|
"learning_rate": 0.00018375227593309546, |
|
"loss": 1.0485, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.26648209032704623, |
|
"grad_norm": 0.4697518050670624, |
|
"learning_rate": 0.00018358687076435015, |
|
"loss": 1.0106, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.2673472919190171, |
|
"grad_norm": 0.44449952244758606, |
|
"learning_rate": 0.00018342070315892182, |
|
"loss": 1.0133, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.2682124935109881, |
|
"grad_norm": 0.43841657042503357, |
|
"learning_rate": 0.0001832537746325064, |
|
"loss": 1.003, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.269077695102959, |
|
"grad_norm": 0.45256274938583374, |
|
"learning_rate": 0.00018308608670774054, |
|
"loss": 0.991, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.26994289669492993, |
|
"grad_norm": 0.4906154274940491, |
|
"learning_rate": 0.00018291764091418769, |
|
"loss": 1.0044, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.27080809828690083, |
|
"grad_norm": 0.42110729217529297, |
|
"learning_rate": 0.00018274843878832427, |
|
"loss": 0.9705, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.2716732998788718, |
|
"grad_norm": 0.4667767584323883, |
|
"learning_rate": 0.00018257848187352543, |
|
"loss": 1.0161, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.2725385014708427, |
|
"grad_norm": 0.46111616492271423, |
|
"learning_rate": 0.00018240777172005138, |
|
"loss": 0.9946, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.27340370306281364, |
|
"grad_norm": 0.420553594827652, |
|
"learning_rate": 0.0001822363098850327, |
|
"loss": 0.9672, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.27426890465478454, |
|
"grad_norm": 0.4221336543560028, |
|
"learning_rate": 0.0001820640979324566, |
|
"loss": 0.9421, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.2751341062467555, |
|
"grad_norm": 0.42993614077568054, |
|
"learning_rate": 0.0001818911374331524, |
|
"loss": 0.9645, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.27599930783872645, |
|
"grad_norm": 0.42793458700180054, |
|
"learning_rate": 0.00018171742996477737, |
|
"loss": 0.9735, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.27686450943069735, |
|
"grad_norm": 0.49229153990745544, |
|
"learning_rate": 0.00018154297711180212, |
|
"loss": 0.97, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.2777297110226683, |
|
"grad_norm": 0.4865579307079315, |
|
"learning_rate": 0.00018136778046549642, |
|
"loss": 0.9487, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.2785949126146392, |
|
"grad_norm": 0.442254900932312, |
|
"learning_rate": 0.0001811918416239144, |
|
"loss": 0.9725, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.27946011420661016, |
|
"grad_norm": 0.45241814851760864, |
|
"learning_rate": 0.0001810151621918802, |
|
"loss": 0.9681, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.28032531579858105, |
|
"grad_norm": 0.4351949095726013, |
|
"learning_rate": 0.00018083774378097327, |
|
"loss": 0.9728, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.281190517390552, |
|
"grad_norm": 0.46627506613731384, |
|
"learning_rate": 0.0001806595880095136, |
|
"loss": 0.9751, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.2820557189825229, |
|
"grad_norm": 0.4739546775817871, |
|
"learning_rate": 0.000180480696502547, |
|
"loss": 0.986, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.28292092057449386, |
|
"grad_norm": 0.4473097622394562, |
|
"learning_rate": 0.00018030107089183028, |
|
"loss": 0.9424, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.28378612216646476, |
|
"grad_norm": 0.4566546678543091, |
|
"learning_rate": 0.00018012071281581644, |
|
"loss": 0.9743, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.2846513237584357, |
|
"grad_norm": 0.49578657746315, |
|
"learning_rate": 0.00017993962391963953, |
|
"loss": 0.982, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.28551652535040667, |
|
"grad_norm": 0.43043839931488037, |
|
"learning_rate": 0.00017975780585509994, |
|
"loss": 0.9775, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.28638172694237757, |
|
"grad_norm": 0.4603051543235779, |
|
"learning_rate": 0.000179575260280649, |
|
"loss": 1.019, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.2872469285343485, |
|
"grad_norm": 0.4643576443195343, |
|
"learning_rate": 0.00017939198886137401, |
|
"loss": 1.0245, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.2881121301263194, |
|
"grad_norm": 0.4360422194004059, |
|
"learning_rate": 0.00017920799326898328, |
|
"loss": 0.9784, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.2889773317182904, |
|
"grad_norm": 0.44874873757362366, |
|
"learning_rate": 0.00017902327518179033, |
|
"loss": 0.9807, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.2898425333102613, |
|
"grad_norm": 0.4646460711956024, |
|
"learning_rate": 0.0001788378362846992, |
|
"loss": 1.009, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.29070773490223223, |
|
"grad_norm": 0.44585877656936646, |
|
"learning_rate": 0.00017865167826918856, |
|
"loss": 0.9996, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.2915729364942031, |
|
"grad_norm": 0.43145236372947693, |
|
"learning_rate": 0.0001784648028332967, |
|
"loss": 0.9971, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.2924381380861741, |
|
"grad_norm": 0.4801592230796814, |
|
"learning_rate": 0.0001782772116816057, |
|
"loss": 0.9416, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.293303339678145, |
|
"grad_norm": 0.4545969069004059, |
|
"learning_rate": 0.00017808890652522613, |
|
"loss": 0.9463, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.29416854127011594, |
|
"grad_norm": 0.4462297856807709, |
|
"learning_rate": 0.0001778998890817813, |
|
"loss": 0.9647, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.2950337428620869, |
|
"grad_norm": 0.480363667011261, |
|
"learning_rate": 0.0001777101610753917, |
|
"loss": 0.9527, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.2958989444540578, |
|
"grad_norm": 0.4686351418495178, |
|
"learning_rate": 0.00017751972423665912, |
|
"loss": 0.9553, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.29676414604602874, |
|
"grad_norm": 0.4903172552585602, |
|
"learning_rate": 0.000177328580302651, |
|
"loss": 0.9789, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.29762934763799964, |
|
"grad_norm": 0.4888453185558319, |
|
"learning_rate": 0.00017713673101688452, |
|
"loss": 1.002, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.2984945492299706, |
|
"grad_norm": 0.49155911803245544, |
|
"learning_rate": 0.0001769441781293108, |
|
"loss": 0.9818, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.2993597508219415, |
|
"grad_norm": 0.4467802047729492, |
|
"learning_rate": 0.00017675092339629876, |
|
"loss": 1.0178, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.30022495241391245, |
|
"grad_norm": 0.4385581612586975, |
|
"learning_rate": 0.00017655696858061924, |
|
"loss": 0.9938, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.30109015400588335, |
|
"grad_norm": 0.4440990090370178, |
|
"learning_rate": 0.00017636231545142884, |
|
"loss": 1.0216, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.3019553555978543, |
|
"grad_norm": 0.4298366606235504, |
|
"learning_rate": 0.0001761669657842539, |
|
"loss": 1.0444, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.30282055718982526, |
|
"grad_norm": 0.47890421748161316, |
|
"learning_rate": 0.0001759709213609741, |
|
"loss": 0.9882, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.30368575878179616, |
|
"grad_norm": 0.4364605247974396, |
|
"learning_rate": 0.00017577418396980647, |
|
"loss": 1.0003, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.3045509603737671, |
|
"grad_norm": 0.42261895537376404, |
|
"learning_rate": 0.0001755767554052888, |
|
"loss": 0.9954, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.305416161965738, |
|
"grad_norm": 0.448160320520401, |
|
"learning_rate": 0.00017537863746826352, |
|
"loss": 0.9995, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.30628136355770896, |
|
"grad_norm": 0.46218201518058777, |
|
"learning_rate": 0.0001751798319658611, |
|
"loss": 0.9356, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.30714656514967986, |
|
"grad_norm": 0.45802685618400574, |
|
"learning_rate": 0.00017498034071148367, |
|
"loss": 0.9978, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.3080117667416508, |
|
"grad_norm": 0.4749560058116913, |
|
"learning_rate": 0.0001747801655247884, |
|
"loss": 0.9887, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.3088769683336217, |
|
"grad_norm": 0.44203734397888184, |
|
"learning_rate": 0.000174579308231671, |
|
"loss": 0.9844, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.30974216992559267, |
|
"grad_norm": 0.509231686592102, |
|
"learning_rate": 0.0001743777706642489, |
|
"loss": 1.0078, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.31060737151756357, |
|
"grad_norm": 0.44051945209503174, |
|
"learning_rate": 0.00017417555466084475, |
|
"loss": 1.0002, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.3114725731095345, |
|
"grad_norm": 0.4557575285434723, |
|
"learning_rate": 0.0001739726620659695, |
|
"loss": 0.9577, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.3123377747015055, |
|
"grad_norm": 0.4637974798679352, |
|
"learning_rate": 0.0001737690947303056, |
|
"loss": 0.9786, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.3132029762934764, |
|
"grad_norm": 0.44868960976600647, |
|
"learning_rate": 0.00017356485451069023, |
|
"loss": 0.9733, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.31406817788544733, |
|
"grad_norm": 0.43835899233818054, |
|
"learning_rate": 0.00017335994327009814, |
|
"loss": 1.0112, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.31493337947741823, |
|
"grad_norm": 0.4374648928642273, |
|
"learning_rate": 0.0001731543628776249, |
|
"loss": 1.0003, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.3157985810693892, |
|
"grad_norm": 0.4368119239807129, |
|
"learning_rate": 0.00017294811520846972, |
|
"loss": 0.9886, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.3166637826613601, |
|
"grad_norm": 0.47979751229286194, |
|
"learning_rate": 0.00017274120214391832, |
|
"loss": 0.9984, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.31752898425333104, |
|
"grad_norm": 0.4430343210697174, |
|
"learning_rate": 0.00017253362557132583, |
|
"loss": 1.0221, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.31839418584530194, |
|
"grad_norm": 0.47513678669929504, |
|
"learning_rate": 0.00017232538738409963, |
|
"loss": 0.9883, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.3192593874372729, |
|
"grad_norm": 0.41813385486602783, |
|
"learning_rate": 0.00017211648948168187, |
|
"loss": 1.0066, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.3201245890292438, |
|
"grad_norm": 0.463078111410141, |
|
"learning_rate": 0.0001719069337695325, |
|
"loss": 0.9792, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.32098979062121474, |
|
"grad_norm": 0.4492539167404175, |
|
"learning_rate": 0.00017169672215911142, |
|
"loss": 1.0032, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.3218549922131857, |
|
"grad_norm": 0.47429776191711426, |
|
"learning_rate": 0.00017148585656786146, |
|
"loss": 1.0013, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.3227201938051566, |
|
"grad_norm": 0.474584698677063, |
|
"learning_rate": 0.00017127433891919074, |
|
"loss": 0.9642, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.32358539539712755, |
|
"grad_norm": 0.4662615954875946, |
|
"learning_rate": 0.000171062171142455, |
|
"loss": 0.9688, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.32445059698909845, |
|
"grad_norm": 0.43062394857406616, |
|
"learning_rate": 0.00017084935517294023, |
|
"loss": 0.9717, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.3253157985810694, |
|
"grad_norm": 0.478098064661026, |
|
"learning_rate": 0.00017063589295184483, |
|
"loss": 0.9646, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.3261810001730403, |
|
"grad_norm": 0.4593997895717621, |
|
"learning_rate": 0.000170421786426262, |
|
"loss": 1.0208, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.32704620176501126, |
|
"grad_norm": 0.44811689853668213, |
|
"learning_rate": 0.000170207037549162, |
|
"loss": 0.982, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.32791140335698216, |
|
"grad_norm": 0.5170145034790039, |
|
"learning_rate": 0.0001699916482793742, |
|
"loss": 0.9832, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.3287766049489531, |
|
"grad_norm": 0.4444349706172943, |
|
"learning_rate": 0.00016977562058156936, |
|
"loss": 1.0137, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.329641806540924, |
|
"grad_norm": 0.44498804211616516, |
|
"learning_rate": 0.00016955895642624166, |
|
"loss": 1.0198, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.33050700813289496, |
|
"grad_norm": 0.44884684681892395, |
|
"learning_rate": 0.00016934165778969074, |
|
"loss": 0.9827, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.3313722097248659, |
|
"grad_norm": 0.48218467831611633, |
|
"learning_rate": 0.00016912372665400354, |
|
"loss": 0.9728, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.3322374113168368, |
|
"grad_norm": 0.45547670125961304, |
|
"learning_rate": 0.00016890516500703645, |
|
"loss": 0.9389, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.33310261290880777, |
|
"grad_norm": 0.43529343605041504, |
|
"learning_rate": 0.0001686859748423971, |
|
"loss": 0.9806, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.33396781450077867, |
|
"grad_norm": 0.4809509813785553, |
|
"learning_rate": 0.00016846615815942597, |
|
"loss": 0.9655, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.3348330160927496, |
|
"grad_norm": 0.4193096160888672, |
|
"learning_rate": 0.00016824571696317844, |
|
"loss": 1.0132, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.3356982176847205, |
|
"grad_norm": 0.4560771882534027, |
|
"learning_rate": 0.00016802465326440634, |
|
"loss": 1.0416, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.3365634192766915, |
|
"grad_norm": 0.44306620955467224, |
|
"learning_rate": 0.00016780296907953972, |
|
"loss": 0.9739, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.3374286208686624, |
|
"grad_norm": 0.44249188899993896, |
|
"learning_rate": 0.00016758066643066826, |
|
"loss": 0.9777, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.33829382246063333, |
|
"grad_norm": 0.44801339507102966, |
|
"learning_rate": 0.00016735774734552306, |
|
"loss": 0.9981, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.33915902405260423, |
|
"grad_norm": 0.4256415069103241, |
|
"learning_rate": 0.000167134213857458, |
|
"loss": 0.9594, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.3400242256445752, |
|
"grad_norm": 0.44402891397476196, |
|
"learning_rate": 0.0001669100680054312, |
|
"loss": 0.9862, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.34088942723654614, |
|
"grad_norm": 0.5019104480743408, |
|
"learning_rate": 0.0001666853118339865, |
|
"loss": 0.975, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.34175462882851704, |
|
"grad_norm": 0.47364917397499084, |
|
"learning_rate": 0.00016645994739323473, |
|
"loss": 0.969, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.342619830420488, |
|
"grad_norm": 0.4283023774623871, |
|
"learning_rate": 0.00016623397673883508, |
|
"loss": 0.9786, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.3434850320124589, |
|
"grad_norm": 0.47522231936454773, |
|
"learning_rate": 0.00016600740193197623, |
|
"loss": 0.9779, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.34435023360442985, |
|
"grad_norm": 0.46930992603302, |
|
"learning_rate": 0.00016578022503935773, |
|
"loss": 1.0124, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.34521543519640074, |
|
"grad_norm": 0.5017907619476318, |
|
"learning_rate": 0.000165552448133171, |
|
"loss": 0.9931, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.3460806367883717, |
|
"grad_norm": 0.5087099075317383, |
|
"learning_rate": 0.0001653240732910804, |
|
"loss": 0.94, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.3469458383803426, |
|
"grad_norm": 0.44298025965690613, |
|
"learning_rate": 0.00016509510259620457, |
|
"loss": 0.9793, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.34781103997231355, |
|
"grad_norm": 0.43045955896377563, |
|
"learning_rate": 0.00016486553813709696, |
|
"loss": 0.9673, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.3486762415642845, |
|
"grad_norm": 0.5183268189430237, |
|
"learning_rate": 0.00016463538200772718, |
|
"loss": 1.0128, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.3495414431562554, |
|
"grad_norm": 0.47518181800842285, |
|
"learning_rate": 0.00016440463630746174, |
|
"loss": 0.9851, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.35040664474822636, |
|
"grad_norm": 0.4647828936576843, |
|
"learning_rate": 0.00016417330314104491, |
|
"loss": 0.9923, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.35127184634019726, |
|
"grad_norm": 0.484781414270401, |
|
"learning_rate": 0.0001639413846185795, |
|
"loss": 0.9818, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.3521370479321682, |
|
"grad_norm": 0.4849836230278015, |
|
"learning_rate": 0.00016370888285550763, |
|
"loss": 0.9607, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.3530022495241391, |
|
"grad_norm": 0.48198217153549194, |
|
"learning_rate": 0.0001634757999725915, |
|
"loss": 0.9953, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.35386745111611007, |
|
"grad_norm": 0.4720988869667053, |
|
"learning_rate": 0.00016324213809589393, |
|
"loss": 0.9718, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.35473265270808096, |
|
"grad_norm": 0.48839956521987915, |
|
"learning_rate": 0.00016300789935675908, |
|
"loss": 0.9521, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.3555978543000519, |
|
"grad_norm": 0.47152647376060486, |
|
"learning_rate": 0.00016277308589179287, |
|
"loss": 0.9766, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.3564630558920228, |
|
"grad_norm": 0.4669671654701233, |
|
"learning_rate": 0.00016253769984284365, |
|
"loss": 1.0222, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.35732825748399377, |
|
"grad_norm": 0.4763648509979248, |
|
"learning_rate": 0.00016230174335698258, |
|
"loss": 1.0098, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.3581934590759647, |
|
"grad_norm": 0.45041629672050476, |
|
"learning_rate": 0.00016206521858648406, |
|
"loss": 0.9917, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.3590586606679356, |
|
"grad_norm": 0.4782506823539734, |
|
"learning_rate": 0.00016182812768880602, |
|
"loss": 0.935, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.3599238622599066, |
|
"grad_norm": 0.4275178015232086, |
|
"learning_rate": 0.00016159047282657043, |
|
"loss": 0.9905, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.3607890638518775, |
|
"grad_norm": 0.4617001414299011, |
|
"learning_rate": 0.0001613522561675433, |
|
"loss": 0.9867, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.36165426544384843, |
|
"grad_norm": 0.5393168330192566, |
|
"learning_rate": 0.00016111347988461523, |
|
"loss": 0.9924, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.36251946703581933, |
|
"grad_norm": 0.4483303427696228, |
|
"learning_rate": 0.0001608741461557813, |
|
"loss": 0.9911, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.3633846686277903, |
|
"grad_norm": 0.4839470088481903, |
|
"learning_rate": 0.0001606342571641214, |
|
"loss": 0.9432, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.3642498702197612, |
|
"grad_norm": 0.4631226062774658, |
|
"learning_rate": 0.00016039381509778017, |
|
"loss": 0.9823, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.36511507181173214, |
|
"grad_norm": 0.4838786721229553, |
|
"learning_rate": 0.00016015282214994724, |
|
"loss": 0.9399, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.36598027340370304, |
|
"grad_norm": 0.458999365568161, |
|
"learning_rate": 0.00015991128051883697, |
|
"loss": 1.0042, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.366845474995674, |
|
"grad_norm": 0.45957833528518677, |
|
"learning_rate": 0.00015966919240766858, |
|
"loss": 0.982, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.36771067658764495, |
|
"grad_norm": 0.4562719464302063, |
|
"learning_rate": 0.000159426560024646, |
|
"loss": 0.9766, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.36857587817961585, |
|
"grad_norm": 0.4500807821750641, |
|
"learning_rate": 0.00015918338558293773, |
|
"loss": 0.9552, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.3694410797715868, |
|
"grad_norm": 0.42124584317207336, |
|
"learning_rate": 0.00015893967130065667, |
|
"loss": 0.9275, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.3703062813635577, |
|
"grad_norm": 0.4581054747104645, |
|
"learning_rate": 0.00015869541940083978, |
|
"loss": 1.0353, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.37117148295552865, |
|
"grad_norm": 0.47650617361068726, |
|
"learning_rate": 0.0001584506321114281, |
|
"loss": 1.0077, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.37203668454749955, |
|
"grad_norm": 0.45496097207069397, |
|
"learning_rate": 0.00015820531166524593, |
|
"loss": 1.0202, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.3729018861394705, |
|
"grad_norm": 0.46953004598617554, |
|
"learning_rate": 0.000157959460299981, |
|
"loss": 0.9695, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.3737670877314414, |
|
"grad_norm": 0.4414077401161194, |
|
"learning_rate": 0.00015771308025816372, |
|
"loss": 0.967, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.37463228932341236, |
|
"grad_norm": 0.4325014650821686, |
|
"learning_rate": 0.00015746617378714674, |
|
"loss": 0.9621, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.37549749091538326, |
|
"grad_norm": 0.45029759407043457, |
|
"learning_rate": 0.00015721874313908468, |
|
"loss": 0.9789, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.3763626925073542, |
|
"grad_norm": 0.4882911443710327, |
|
"learning_rate": 0.00015697079057091332, |
|
"loss": 0.9836, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.37722789409932517, |
|
"grad_norm": 0.4586533308029175, |
|
"learning_rate": 0.0001567223183443292, |
|
"loss": 0.9221, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.37809309569129607, |
|
"grad_norm": 0.4566982686519623, |
|
"learning_rate": 0.00015647332872576887, |
|
"loss": 0.9981, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.378958297283267, |
|
"grad_norm": 0.4582918584346771, |
|
"learning_rate": 0.00015622382398638825, |
|
"loss": 0.959, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.3798234988752379, |
|
"grad_norm": 0.455666720867157, |
|
"learning_rate": 0.000155973806402042, |
|
"loss": 0.9668, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.3806887004672089, |
|
"grad_norm": 0.4562900960445404, |
|
"learning_rate": 0.00015572327825326263, |
|
"loss": 0.9914, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.3815539020591798, |
|
"grad_norm": 0.4523032307624817, |
|
"learning_rate": 0.00015547224182523977, |
|
"loss": 0.9311, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.3824191036511507, |
|
"grad_norm": 0.4746415913105011, |
|
"learning_rate": 0.0001552206994077993, |
|
"loss": 1.0036, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.3832843052431216, |
|
"grad_norm": 0.46040624380111694, |
|
"learning_rate": 0.00015496865329538254, |
|
"loss": 1.0197, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.3841495068350926, |
|
"grad_norm": 0.4492608308792114, |
|
"learning_rate": 0.00015471610578702522, |
|
"loss": 0.9614, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.3850147084270635, |
|
"grad_norm": 0.4558756649494171, |
|
"learning_rate": 0.00015446305918633647, |
|
"loss": 1.0048, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.38587991001903443, |
|
"grad_norm": 0.46587228775024414, |
|
"learning_rate": 0.00015420951580147807, |
|
"loss": 0.9887, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.3867451116110054, |
|
"grad_norm": 0.46439608931541443, |
|
"learning_rate": 0.00015395547794514296, |
|
"loss": 0.9568, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.3876103132029763, |
|
"grad_norm": 0.5386447310447693, |
|
"learning_rate": 0.00015370094793453466, |
|
"loss": 1.0201, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.38847551479494724, |
|
"grad_norm": 0.46466854214668274, |
|
"learning_rate": 0.0001534459280913456, |
|
"loss": 0.9553, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.38934071638691814, |
|
"grad_norm": 0.4588780999183655, |
|
"learning_rate": 0.0001531904207417365, |
|
"loss": 1.002, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.3902059179788891, |
|
"grad_norm": 0.4447367191314697, |
|
"learning_rate": 0.00015293442821631467, |
|
"loss": 0.9832, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.39107111957086, |
|
"grad_norm": 0.4650314450263977, |
|
"learning_rate": 0.00015267795285011296, |
|
"loss": 0.9939, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.39193632116283095, |
|
"grad_norm": 0.44617509841918945, |
|
"learning_rate": 0.0001524209969825685, |
|
"loss": 0.9858, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.39280152275480185, |
|
"grad_norm": 0.469292551279068, |
|
"learning_rate": 0.00015216356295750128, |
|
"loss": 0.9522, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.3936667243467728, |
|
"grad_norm": 0.5017248392105103, |
|
"learning_rate": 0.00015190565312309286, |
|
"loss": 0.9806, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.39453192593874375, |
|
"grad_norm": 0.4717511236667633, |
|
"learning_rate": 0.00015164726983186476, |
|
"loss": 0.9885, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.39539712753071465, |
|
"grad_norm": 0.4415047764778137, |
|
"learning_rate": 0.00015138841544065724, |
|
"loss": 0.977, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.3962623291226856, |
|
"grad_norm": 0.46711215376853943, |
|
"learning_rate": 0.00015112909231060768, |
|
"loss": 1.0132, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.3971275307146565, |
|
"grad_norm": 0.44584599137306213, |
|
"learning_rate": 0.00015086930280712904, |
|
"loss": 1.0131, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.39799273230662746, |
|
"grad_norm": 0.46857744455337524, |
|
"learning_rate": 0.00015060904929988824, |
|
"loss": 0.9522, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.39885793389859836, |
|
"grad_norm": 0.5087977647781372, |
|
"learning_rate": 0.00015034833416278473, |
|
"loss": 0.9703, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.3997231354905693, |
|
"grad_norm": 0.45563820004463196, |
|
"learning_rate": 0.0001500871597739286, |
|
"loss": 0.9969, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.4005883370825402, |
|
"grad_norm": 0.4566476345062256, |
|
"learning_rate": 0.00014982552851561904, |
|
"loss": 1.0003, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.40145353867451117, |
|
"grad_norm": 0.5005662441253662, |
|
"learning_rate": 0.00014956344277432257, |
|
"loss": 0.9812, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.40231874026648207, |
|
"grad_norm": 0.48637065291404724, |
|
"learning_rate": 0.00014930090494065133, |
|
"loss": 1.0326, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.403183941858453, |
|
"grad_norm": 0.47612977027893066, |
|
"learning_rate": 0.0001490379174093411, |
|
"loss": 0.9604, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.404049143450424, |
|
"grad_norm": 0.4837099015712738, |
|
"learning_rate": 0.00014877448257922965, |
|
"loss": 0.9985, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.4049143450423949, |
|
"grad_norm": 0.5050654411315918, |
|
"learning_rate": 0.00014851060285323478, |
|
"loss": 0.9943, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.40577954663436583, |
|
"grad_norm": 0.4585602581501007, |
|
"learning_rate": 0.00014824628063833233, |
|
"loss": 0.9702, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.4066447482263367, |
|
"grad_norm": 0.4651777446269989, |
|
"learning_rate": 0.0001479815183455344, |
|
"loss": 0.9967, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.4075099498183077, |
|
"grad_norm": 0.46856093406677246, |
|
"learning_rate": 0.00014771631838986717, |
|
"loss": 0.9894, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.4083751514102786, |
|
"grad_norm": 0.4769989848136902, |
|
"learning_rate": 0.00014745068319034905, |
|
"loss": 0.9982, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.40924035300224954, |
|
"grad_norm": 0.4633113145828247, |
|
"learning_rate": 0.00014718461516996842, |
|
"loss": 1.0114, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.41010555459422043, |
|
"grad_norm": 0.46894222497940063, |
|
"learning_rate": 0.0001469181167556617, |
|
"loss": 1.001, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.4109707561861914, |
|
"grad_norm": 0.4495028853416443, |
|
"learning_rate": 0.0001466511903782911, |
|
"loss": 0.9475, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.4118359577781623, |
|
"grad_norm": 0.4745693504810333, |
|
"learning_rate": 0.0001463838384726225, |
|
"loss": 0.9911, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.41270115937013324, |
|
"grad_norm": 0.4932793974876404, |
|
"learning_rate": 0.00014611606347730326, |
|
"loss": 0.9857, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.4135663609621042, |
|
"grad_norm": 0.45724374055862427, |
|
"learning_rate": 0.00014584786783483996, |
|
"loss": 0.9506, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.4144315625540751, |
|
"grad_norm": 0.48630237579345703, |
|
"learning_rate": 0.000145579253991576, |
|
"loss": 1.0016, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.41529676414604605, |
|
"grad_norm": 0.48039719462394714, |
|
"learning_rate": 0.00014531022439766956, |
|
"loss": 1.0147, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.41616196573801695, |
|
"grad_norm": 0.4719230830669403, |
|
"learning_rate": 0.00014504078150707092, |
|
"loss": 0.9935, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.4170271673299879, |
|
"grad_norm": 0.4671541154384613, |
|
"learning_rate": 0.00014477092777750037, |
|
"loss": 0.9885, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.4178923689219588, |
|
"grad_norm": 0.4346173107624054, |
|
"learning_rate": 0.00014450066567042556, |
|
"loss": 0.9948, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.41875757051392976, |
|
"grad_norm": 0.44263723492622375, |
|
"learning_rate": 0.00014422999765103923, |
|
"loss": 0.9637, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.41962277210590065, |
|
"grad_norm": 0.45122030377388, |
|
"learning_rate": 0.00014395892618823664, |
|
"loss": 0.969, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.4204879736978716, |
|
"grad_norm": 0.526986837387085, |
|
"learning_rate": 0.00014368745375459296, |
|
"loss": 0.9778, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.4213531752898425, |
|
"grad_norm": 0.502812385559082, |
|
"learning_rate": 0.00014341558282634096, |
|
"loss": 0.9846, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.42221837688181346, |
|
"grad_norm": 0.4661869406700134, |
|
"learning_rate": 0.00014314331588334813, |
|
"loss": 0.9688, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.4230835784737844, |
|
"grad_norm": 0.4835832715034485, |
|
"learning_rate": 0.00014287065540909429, |
|
"loss": 0.976, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.4239487800657553, |
|
"grad_norm": 0.46521812677383423, |
|
"learning_rate": 0.00014259760389064884, |
|
"loss": 1.0407, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.42481398165772627, |
|
"grad_norm": 0.4323074519634247, |
|
"learning_rate": 0.00014232416381864803, |
|
"loss": 0.9383, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.42567918324969717, |
|
"grad_norm": 0.4658597409725189, |
|
"learning_rate": 0.00014205033768727242, |
|
"loss": 1.011, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.4265443848416681, |
|
"grad_norm": 0.46207860112190247, |
|
"learning_rate": 0.00014177612799422384, |
|
"loss": 0.9617, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.427409586433639, |
|
"grad_norm": 0.48865434527397156, |
|
"learning_rate": 0.00014150153724070292, |
|
"loss": 0.9585, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.42827478802561, |
|
"grad_norm": 0.44298145174980164, |
|
"learning_rate": 0.00014122656793138605, |
|
"loss": 0.9844, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.4291399896175809, |
|
"grad_norm": 0.4769092798233032, |
|
"learning_rate": 0.00014095122257440265, |
|
"loss": 1.0026, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.43000519120955183, |
|
"grad_norm": 0.4551126956939697, |
|
"learning_rate": 0.0001406755036813122, |
|
"loss": 0.9775, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.43087039280152273, |
|
"grad_norm": 0.45167794823646545, |
|
"learning_rate": 0.00014039941376708142, |
|
"loss": 0.9684, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.4317355943934937, |
|
"grad_norm": 0.5096551179885864, |
|
"learning_rate": 0.00014012295535006136, |
|
"loss": 1.0283, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.43260079598546464, |
|
"grad_norm": 0.45402616262435913, |
|
"learning_rate": 0.0001398461309519642, |
|
"loss": 0.9989, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.43346599757743554, |
|
"grad_norm": 0.4679848849773407, |
|
"learning_rate": 0.0001395689430978406, |
|
"loss": 1.0361, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.4343311991694065, |
|
"grad_norm": 0.4805293381214142, |
|
"learning_rate": 0.00013929139431605633, |
|
"loss": 0.979, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.4351964007613774, |
|
"grad_norm": 0.46778836846351624, |
|
"learning_rate": 0.00013901348713826948, |
|
"loss": 1.0005, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.43606160235334834, |
|
"grad_norm": 0.4670213758945465, |
|
"learning_rate": 0.00013873522409940713, |
|
"loss": 0.9671, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.43692680394531924, |
|
"grad_norm": 0.44049155712127686, |
|
"learning_rate": 0.00013845660773764243, |
|
"loss": 0.9851, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.4377920055372902, |
|
"grad_norm": 0.4682471752166748, |
|
"learning_rate": 0.00013817764059437132, |
|
"loss": 0.9386, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.4386572071292611, |
|
"grad_norm": 0.4299015700817108, |
|
"learning_rate": 0.00013789832521418941, |
|
"loss": 0.9519, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.43952240872123205, |
|
"grad_norm": 0.4857511520385742, |
|
"learning_rate": 0.00013761866414486874, |
|
"loss": 0.9507, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.440387610313203, |
|
"grad_norm": 0.4522685408592224, |
|
"learning_rate": 0.00013733865993733447, |
|
"loss": 0.9712, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.4412528119051739, |
|
"grad_norm": 0.4425513446331024, |
|
"learning_rate": 0.00013705831514564186, |
|
"loss": 0.9616, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.44211801349714486, |
|
"grad_norm": 0.4239989221096039, |
|
"learning_rate": 0.0001367776323269527, |
|
"loss": 0.9598, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.44298321508911576, |
|
"grad_norm": 0.4723816215991974, |
|
"learning_rate": 0.00013649661404151203, |
|
"loss": 0.9917, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.4438484166810867, |
|
"grad_norm": 0.44909706711769104, |
|
"learning_rate": 0.00013621526285262502, |
|
"loss": 1.0044, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.4447136182730576, |
|
"grad_norm": 0.48389700055122375, |
|
"learning_rate": 0.00013593358132663324, |
|
"loss": 1.0128, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.44557881986502856, |
|
"grad_norm": 0.45943713188171387, |
|
"learning_rate": 0.0001356515720328915, |
|
"loss": 0.9519, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.44644402145699946, |
|
"grad_norm": 0.5405260324478149, |
|
"learning_rate": 0.00013536923754374437, |
|
"loss": 0.9431, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.4473092230489704, |
|
"grad_norm": 0.4995253086090088, |
|
"learning_rate": 0.00013508658043450265, |
|
"loss": 0.9976, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.4481744246409413, |
|
"grad_norm": 0.4669324457645416, |
|
"learning_rate": 0.00013480360328341993, |
|
"loss": 1.0101, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.44903962623291227, |
|
"grad_norm": 0.44764748215675354, |
|
"learning_rate": 0.00013452030867166901, |
|
"loss": 0.9538, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.4499048278248832, |
|
"grad_norm": 0.46463918685913086, |
|
"learning_rate": 0.00013423669918331848, |
|
"loss": 0.9601, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.4507700294168541, |
|
"grad_norm": 0.45471301674842834, |
|
"learning_rate": 0.000133952777405309, |
|
"loss": 0.9727, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.4516352310088251, |
|
"grad_norm": 0.4745343029499054, |
|
"learning_rate": 0.00013366854592742984, |
|
"loss": 0.9931, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.452500432600796, |
|
"grad_norm": 0.43110033869743347, |
|
"learning_rate": 0.00013338400734229516, |
|
"loss": 0.9657, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.45336563419276693, |
|
"grad_norm": 0.48442110419273376, |
|
"learning_rate": 0.00013309916424532035, |
|
"loss": 0.9869, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.45423083578473783, |
|
"grad_norm": 0.4661529064178467, |
|
"learning_rate": 0.00013281401923469856, |
|
"loss": 0.9899, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.4550960373767088, |
|
"grad_norm": 0.4315500557422638, |
|
"learning_rate": 0.00013252857491137665, |
|
"loss": 0.9737, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.4559612389686797, |
|
"grad_norm": 0.43988022208213806, |
|
"learning_rate": 0.00013224283387903177, |
|
"loss": 0.9847, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.45682644056065064, |
|
"grad_norm": 0.49706265330314636, |
|
"learning_rate": 0.00013195679874404746, |
|
"loss": 0.9938, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.45769164215262154, |
|
"grad_norm": 0.48079583048820496, |
|
"learning_rate": 0.0001316704721154899, |
|
"loss": 0.9714, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.4585568437445925, |
|
"grad_norm": 0.4445064067840576, |
|
"learning_rate": 0.0001313838566050842, |
|
"loss": 0.9566, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.45942204533656344, |
|
"grad_norm": 0.46092909574508667, |
|
"learning_rate": 0.0001310969548271904, |
|
"loss": 0.9506, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.46028724692853434, |
|
"grad_norm": 0.4312794804573059, |
|
"learning_rate": 0.00013080976939877984, |
|
"loss": 0.957, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.4611524485205053, |
|
"grad_norm": 0.43655863404273987, |
|
"learning_rate": 0.00013052230293941107, |
|
"loss": 1.0075, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.4620176501124762, |
|
"grad_norm": 0.483980655670166, |
|
"learning_rate": 0.0001302345580712061, |
|
"loss": 0.9979, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.46288285170444715, |
|
"grad_norm": 0.4380267858505249, |
|
"learning_rate": 0.00012994653741882646, |
|
"loss": 0.9632, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.46374805329641805, |
|
"grad_norm": 0.4459783434867859, |
|
"learning_rate": 0.00012965824360944915, |
|
"loss": 0.9637, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.464613254888389, |
|
"grad_norm": 0.46052780747413635, |
|
"learning_rate": 0.00012936967927274298, |
|
"loss": 0.9944, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.4654784564803599, |
|
"grad_norm": 0.43334734439849854, |
|
"learning_rate": 0.00012908084704084415, |
|
"loss": 1.0045, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.46634365807233086, |
|
"grad_norm": 0.46884188055992126, |
|
"learning_rate": 0.00012879174954833256, |
|
"loss": 0.9334, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.46720885966430176, |
|
"grad_norm": 0.46058720350265503, |
|
"learning_rate": 0.00012850238943220774, |
|
"loss": 1.0021, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.4680740612562727, |
|
"grad_norm": 0.46159979701042175, |
|
"learning_rate": 0.00012821276933186457, |
|
"loss": 0.9597, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.46893926284824367, |
|
"grad_norm": 0.4704788029193878, |
|
"learning_rate": 0.00012792289188906953, |
|
"loss": 1.0088, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.46980446444021456, |
|
"grad_norm": 0.4625367820262909, |
|
"learning_rate": 0.00012763275974793636, |
|
"loss": 0.966, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.4706696660321855, |
|
"grad_norm": 0.5280613899230957, |
|
"learning_rate": 0.0001273423755549021, |
|
"loss": 0.9922, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.4715348676241564, |
|
"grad_norm": 0.4669839143753052, |
|
"learning_rate": 0.00012705174195870282, |
|
"loss": 1.0138, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.47240006921612737, |
|
"grad_norm": 0.4880688488483429, |
|
"learning_rate": 0.00012676086161034951, |
|
"loss": 0.9749, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.47326527080809827, |
|
"grad_norm": 0.46175360679626465, |
|
"learning_rate": 0.00012646973716310393, |
|
"loss": 0.9597, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.4741304724000692, |
|
"grad_norm": 0.4644413888454437, |
|
"learning_rate": 0.00012617837127245441, |
|
"loss": 0.9941, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.4749956739920401, |
|
"grad_norm": 0.41886743903160095, |
|
"learning_rate": 0.00012588676659609159, |
|
"loss": 0.9234, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.4758608755840111, |
|
"grad_norm": 0.4897218346595764, |
|
"learning_rate": 0.00012559492579388416, |
|
"loss": 1.0088, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.476726077175982, |
|
"grad_norm": 0.47003859281539917, |
|
"learning_rate": 0.00012530285152785467, |
|
"loss": 1.0095, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.47759127876795293, |
|
"grad_norm": 0.42755237221717834, |
|
"learning_rate": 0.00012501054646215515, |
|
"loss": 0.9769, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.4784564803599239, |
|
"grad_norm": 0.5013285279273987, |
|
"learning_rate": 0.00012471801326304293, |
|
"loss": 0.9739, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.4793216819518948, |
|
"grad_norm": 0.4810338318347931, |
|
"learning_rate": 0.00012442525459885624, |
|
"loss": 1.0089, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.48018688354386574, |
|
"grad_norm": 0.42311739921569824, |
|
"learning_rate": 0.00012413227313998984, |
|
"loss": 0.9531, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.48105208513583664, |
|
"grad_norm": 0.46264195442199707, |
|
"learning_rate": 0.00012383907155887083, |
|
"loss": 0.9483, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.4819172867278076, |
|
"grad_norm": 0.4477481245994568, |
|
"learning_rate": 0.00012354565252993397, |
|
"loss": 0.9452, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.4827824883197785, |
|
"grad_norm": 0.5055541396141052, |
|
"learning_rate": 0.0001232520187295976, |
|
"loss": 0.9698, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.48364768991174945, |
|
"grad_norm": 0.4845668375492096, |
|
"learning_rate": 0.00012295817283623908, |
|
"loss": 0.9613, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.48451289150372034, |
|
"grad_norm": 0.47771093249320984, |
|
"learning_rate": 0.00012266411753017036, |
|
"loss": 0.9475, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.4853780930956913, |
|
"grad_norm": 0.43290895223617554, |
|
"learning_rate": 0.0001223698554936135, |
|
"loss": 0.9655, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.48624329468766225, |
|
"grad_norm": 0.4561658799648285, |
|
"learning_rate": 0.00012207538941067637, |
|
"loss": 0.9628, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.48710849627963315, |
|
"grad_norm": 0.442929208278656, |
|
"learning_rate": 0.00012178072196732788, |
|
"loss": 1.0197, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.4879736978716041, |
|
"grad_norm": 0.4500119686126709, |
|
"learning_rate": 0.00012148585585137377, |
|
"loss": 1.0237, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.488838899463575, |
|
"grad_norm": 0.44526350498199463, |
|
"learning_rate": 0.00012119079375243194, |
|
"loss": 0.9766, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.48970410105554596, |
|
"grad_norm": 0.48116928339004517, |
|
"learning_rate": 0.00012089553836190795, |
|
"loss": 0.994, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.49056930264751686, |
|
"grad_norm": 0.5110150575637817, |
|
"learning_rate": 0.00012060009237297046, |
|
"loss": 0.9642, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.4914345042394878, |
|
"grad_norm": 0.4595610201358795, |
|
"learning_rate": 0.00012030445848052665, |
|
"loss": 0.9828, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.4922997058314587, |
|
"grad_norm": 0.4527970552444458, |
|
"learning_rate": 0.00012000863938119768, |
|
"loss": 0.9682, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.49316490742342967, |
|
"grad_norm": 0.4377381503582001, |
|
"learning_rate": 0.00011971263777329406, |
|
"loss": 0.9387, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.49403010901540056, |
|
"grad_norm": 0.44428756833076477, |
|
"learning_rate": 0.00011941645635679111, |
|
"loss": 0.8939, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.4948953106073715, |
|
"grad_norm": 0.5483399629592896, |
|
"learning_rate": 0.00011912009783330417, |
|
"loss": 0.9681, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.4957605121993425, |
|
"grad_norm": 0.5041677951812744, |
|
"learning_rate": 0.00011882356490606414, |
|
"loss": 0.9968, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.4966257137913134, |
|
"grad_norm": 0.4788263440132141, |
|
"learning_rate": 0.00011852686027989273, |
|
"loss": 0.9272, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.4974909153832843, |
|
"grad_norm": 0.44364604353904724, |
|
"learning_rate": 0.00011822998666117775, |
|
"loss": 0.9746, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.4983561169752552, |
|
"grad_norm": 0.44387176632881165, |
|
"learning_rate": 0.00011793294675784853, |
|
"loss": 0.9396, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.4992213185672262, |
|
"grad_norm": 0.4744800627231598, |
|
"learning_rate": 0.00011763574327935113, |
|
"loss": 0.9756, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.5000865201591971, |
|
"grad_norm": 0.420992910861969, |
|
"learning_rate": 0.00011733837893662369, |
|
"loss": 0.9588, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.500951721751168, |
|
"grad_norm": 0.47278472781181335, |
|
"learning_rate": 0.00011704085644207163, |
|
"loss": 0.9438, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.501816923343139, |
|
"grad_norm": 0.4406570494174957, |
|
"learning_rate": 0.00011674317850954297, |
|
"loss": 0.9592, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.5026821249351099, |
|
"grad_norm": 0.4493499994277954, |
|
"learning_rate": 0.00011644534785430359, |
|
"loss": 0.9935, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.5035473265270808, |
|
"grad_norm": 0.5532475709915161, |
|
"learning_rate": 0.00011614736719301236, |
|
"loss": 0.9957, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.5044125281190517, |
|
"grad_norm": 0.44797247648239136, |
|
"learning_rate": 0.00011584923924369646, |
|
"loss": 0.936, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.5052777297110227, |
|
"grad_norm": 0.4524749219417572, |
|
"learning_rate": 0.00011555096672572659, |
|
"loss": 1.0383, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.5061429313029936, |
|
"grad_norm": 0.477090984582901, |
|
"learning_rate": 0.00011525255235979209, |
|
"loss": 0.9769, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.5070081328949645, |
|
"grad_norm": 0.42532771825790405, |
|
"learning_rate": 0.00011495399886787618, |
|
"loss": 0.963, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.5078733344869355, |
|
"grad_norm": 0.4601391553878784, |
|
"learning_rate": 0.0001146553089732311, |
|
"loss": 0.9738, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.5087385360789064, |
|
"grad_norm": 0.4493047595024109, |
|
"learning_rate": 0.00011435648540035328, |
|
"loss": 0.9874, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.5096037376708773, |
|
"grad_norm": 0.5158471465110779, |
|
"learning_rate": 0.00011405753087495859, |
|
"loss": 0.9945, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.5104689392628482, |
|
"grad_norm": 0.4900023937225342, |
|
"learning_rate": 0.00011375844812395722, |
|
"loss": 0.9789, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.5113341408548192, |
|
"grad_norm": 0.4824017286300659, |
|
"learning_rate": 0.00011345923987542913, |
|
"loss": 0.9529, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.5121993424467901, |
|
"grad_norm": 0.4537661075592041, |
|
"learning_rate": 0.00011315990885859885, |
|
"loss": 0.9865, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.513064544038761, |
|
"grad_norm": 0.48735764622688293, |
|
"learning_rate": 0.00011286045780381085, |
|
"loss": 1.007, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.513929745630732, |
|
"grad_norm": 0.494494765996933, |
|
"learning_rate": 0.00011256088944250446, |
|
"loss": 0.9732, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.5147949472227029, |
|
"grad_norm": 0.4806153476238251, |
|
"learning_rate": 0.00011226120650718912, |
|
"loss": 0.983, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.5156601488146738, |
|
"grad_norm": 0.44597506523132324, |
|
"learning_rate": 0.00011196141173141926, |
|
"loss": 1.0055, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.5165253504066447, |
|
"grad_norm": 0.45618417859077454, |
|
"learning_rate": 0.00011166150784976945, |
|
"loss": 0.9788, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.5173905519986157, |
|
"grad_norm": 0.4606229066848755, |
|
"learning_rate": 0.00011136149759780956, |
|
"loss": 0.95, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.5182557535905866, |
|
"grad_norm": 0.42977404594421387, |
|
"learning_rate": 0.00011106138371207963, |
|
"loss": 0.9855, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.5191209551825575, |
|
"grad_norm": 0.45749431848526, |
|
"learning_rate": 0.00011076116893006505, |
|
"loss": 0.9748, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.5199861567745284, |
|
"grad_norm": 0.46687498688697815, |
|
"learning_rate": 0.00011046085599017147, |
|
"loss": 0.9932, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.5208513583664994, |
|
"grad_norm": 0.4712628424167633, |
|
"learning_rate": 0.00011016044763169999, |
|
"loss": 0.9233, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.5217165599584703, |
|
"grad_norm": 0.43779537081718445, |
|
"learning_rate": 0.00010985994659482196, |
|
"loss": 0.9391, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.5225817615504412, |
|
"grad_norm": 0.4388592839241028, |
|
"learning_rate": 0.00010955935562055415, |
|
"loss": 0.9942, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.5234469631424122, |
|
"grad_norm": 0.4575163424015045, |
|
"learning_rate": 0.00010925867745073368, |
|
"loss": 0.9802, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.5243121647343831, |
|
"grad_norm": 0.45447036623954773, |
|
"learning_rate": 0.00010895791482799303, |
|
"loss": 0.991, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.525177366326354, |
|
"grad_norm": 0.4621734917163849, |
|
"learning_rate": 0.00010865707049573501, |
|
"loss": 0.9547, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.5260425679183249, |
|
"grad_norm": 0.4414147436618805, |
|
"learning_rate": 0.00010835614719810776, |
|
"loss": 0.9802, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.5269077695102959, |
|
"grad_norm": 0.447683721780777, |
|
"learning_rate": 0.00010805514767997966, |
|
"loss": 0.9481, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.5277729711022668, |
|
"grad_norm": 0.43929868936538696, |
|
"learning_rate": 0.00010775407468691441, |
|
"loss": 0.9902, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.5286381726942377, |
|
"grad_norm": 0.4730943441390991, |
|
"learning_rate": 0.00010745293096514583, |
|
"loss": 0.9888, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.5295033742862086, |
|
"grad_norm": 0.46589067578315735, |
|
"learning_rate": 0.00010715171926155294, |
|
"loss": 0.9888, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.5303685758781796, |
|
"grad_norm": 0.4755696952342987, |
|
"learning_rate": 0.00010685044232363481, |
|
"loss": 0.9891, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.5312337774701505, |
|
"grad_norm": 0.46226081252098083, |
|
"learning_rate": 0.00010654910289948563, |
|
"loss": 0.9916, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.5320989790621214, |
|
"grad_norm": 0.42151981592178345, |
|
"learning_rate": 0.00010624770373776945, |
|
"loss": 0.9415, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.5329641806540925, |
|
"grad_norm": 0.46804869174957275, |
|
"learning_rate": 0.00010594624758769526, |
|
"loss": 0.9871, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.5338293822460634, |
|
"grad_norm": 0.4632691442966461, |
|
"learning_rate": 0.00010564473719899192, |
|
"loss": 0.9903, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.5346945838380343, |
|
"grad_norm": 0.5181126594543457, |
|
"learning_rate": 0.00010534317532188296, |
|
"loss": 1.0247, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.5355597854300052, |
|
"grad_norm": 0.46891406178474426, |
|
"learning_rate": 0.00010504156470706157, |
|
"loss": 0.9627, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.5364249870219762, |
|
"grad_norm": 0.44084757566452026, |
|
"learning_rate": 0.00010473990810566554, |
|
"loss": 0.9644, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.5372901886139471, |
|
"grad_norm": 0.4899301528930664, |
|
"learning_rate": 0.00010443820826925208, |
|
"loss": 0.9962, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.538155390205918, |
|
"grad_norm": 0.5310002565383911, |
|
"learning_rate": 0.00010413646794977278, |
|
"loss": 0.9352, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.5390205917978889, |
|
"grad_norm": 0.45059874653816223, |
|
"learning_rate": 0.00010383468989954852, |
|
"loss": 1.0001, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.5398857933898599, |
|
"grad_norm": 0.45391032099723816, |
|
"learning_rate": 0.00010353287687124435, |
|
"loss": 0.9482, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.5407509949818308, |
|
"grad_norm": 0.44090577960014343, |
|
"learning_rate": 0.0001032310316178443, |
|
"loss": 0.9858, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.5416161965738017, |
|
"grad_norm": 0.4861569404602051, |
|
"learning_rate": 0.00010292915689262642, |
|
"loss": 0.9731, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.5424813981657727, |
|
"grad_norm": 0.44434854388237, |
|
"learning_rate": 0.00010262725544913756, |
|
"loss": 0.9719, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.5433465997577436, |
|
"grad_norm": 0.44749364256858826, |
|
"learning_rate": 0.00010232533004116822, |
|
"loss": 0.9864, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.5442118013497145, |
|
"grad_norm": 0.4296785593032837, |
|
"learning_rate": 0.00010202338342272756, |
|
"loss": 1.0131, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.5450770029416854, |
|
"grad_norm": 0.47905465960502625, |
|
"learning_rate": 0.00010172141834801823, |
|
"loss": 0.9798, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.5459422045336564, |
|
"grad_norm": 0.43154534697532654, |
|
"learning_rate": 0.00010141943757141119, |
|
"loss": 0.9428, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.5468074061256273, |
|
"grad_norm": 0.5342848300933838, |
|
"learning_rate": 0.00010111744384742064, |
|
"loss": 0.947, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.5476726077175982, |
|
"grad_norm": 0.4587153494358063, |
|
"learning_rate": 0.00010081543993067883, |
|
"loss": 1.0066, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.5485378093095691, |
|
"grad_norm": 0.4687547981739044, |
|
"learning_rate": 0.00010051342857591104, |
|
"loss": 0.9945, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.5494030109015401, |
|
"grad_norm": 0.4243508577346802, |
|
"learning_rate": 0.0001002114125379104, |
|
"loss": 0.9973, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.550268212493511, |
|
"grad_norm": 0.49467694759368896, |
|
"learning_rate": 9.990939457151272e-05, |
|
"loss": 0.9942, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.5511334140854819, |
|
"grad_norm": 0.44364285469055176, |
|
"learning_rate": 9.96073774315714e-05, |
|
"loss": 0.9667, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.5519986156774529, |
|
"grad_norm": 0.4399394094944, |
|
"learning_rate": 9.930536387293235e-05, |
|
"loss": 0.9902, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.5528638172694238, |
|
"grad_norm": 0.43766874074935913, |
|
"learning_rate": 9.900335665040876e-05, |
|
"loss": 0.9626, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.5537290188613947, |
|
"grad_norm": 0.5083682537078857, |
|
"learning_rate": 9.870135851875605e-05, |
|
"loss": 0.971, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.5545942204533656, |
|
"grad_norm": 0.41614192724227905, |
|
"learning_rate": 9.839937223264674e-05, |
|
"loss": 1.0278, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.5554594220453366, |
|
"grad_norm": 0.46571752429008484, |
|
"learning_rate": 9.80974005466452e-05, |
|
"loss": 0.9571, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.5563246236373075, |
|
"grad_norm": 0.46136540174484253, |
|
"learning_rate": 9.779544621518281e-05, |
|
"loss": 0.9669, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.5571898252292784, |
|
"grad_norm": 0.5057949423789978, |
|
"learning_rate": 9.749351199253243e-05, |
|
"loss": 0.9876, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.5580550268212493, |
|
"grad_norm": 0.44634687900543213, |
|
"learning_rate": 9.719160063278371e-05, |
|
"loss": 0.9805, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.5589202284132203, |
|
"grad_norm": 0.4209567904472351, |
|
"learning_rate": 9.688971488981759e-05, |
|
"loss": 0.9621, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.5597854300051912, |
|
"grad_norm": 0.4183271825313568, |
|
"learning_rate": 9.658785751728144e-05, |
|
"loss": 0.9189, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.5606506315971621, |
|
"grad_norm": 0.4438595175743103, |
|
"learning_rate": 9.628603126856378e-05, |
|
"loss": 1.0109, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.5615158331891331, |
|
"grad_norm": 0.4465368688106537, |
|
"learning_rate": 9.598423889676933e-05, |
|
"loss": 0.9747, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.562381034781104, |
|
"grad_norm": 0.444805383682251, |
|
"learning_rate": 9.568248315469373e-05, |
|
"loss": 0.9712, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.5632462363730749, |
|
"grad_norm": 0.43554961681365967, |
|
"learning_rate": 9.53807667947985e-05, |
|
"loss": 1.0063, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.5641114379650458, |
|
"grad_norm": 0.4528721868991852, |
|
"learning_rate": 9.507909256918602e-05, |
|
"loss": 0.9849, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.5649766395570168, |
|
"grad_norm": 0.45312389731407166, |
|
"learning_rate": 9.477746322957416e-05, |
|
"loss": 1.0017, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.5658418411489877, |
|
"grad_norm": 0.45233070850372314, |
|
"learning_rate": 9.447588152727161e-05, |
|
"loss": 1.0283, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.5667070427409586, |
|
"grad_norm": 0.44334331154823303, |
|
"learning_rate": 9.417435021315231e-05, |
|
"loss": 0.9917, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.5675722443329295, |
|
"grad_norm": 0.5259175896644592, |
|
"learning_rate": 9.387287203763072e-05, |
|
"loss": 0.962, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.5684374459249005, |
|
"grad_norm": 0.4928758144378662, |
|
"learning_rate": 9.357144975063661e-05, |
|
"loss": 0.9807, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.5693026475168714, |
|
"grad_norm": 0.44045624136924744, |
|
"learning_rate": 9.327008610158981e-05, |
|
"loss": 0.9728, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.5701678491088423, |
|
"grad_norm": 0.46304404735565186, |
|
"learning_rate": 9.296878383937552e-05, |
|
"loss": 0.9755, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.5710330507008133, |
|
"grad_norm": 0.45428958535194397, |
|
"learning_rate": 9.266754571231874e-05, |
|
"loss": 0.9927, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.5718982522927842, |
|
"grad_norm": 0.4710966944694519, |
|
"learning_rate": 9.236637446815969e-05, |
|
"loss": 0.9391, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.5727634538847551, |
|
"grad_norm": 0.46206918358802795, |
|
"learning_rate": 9.206527285402835e-05, |
|
"loss": 0.9709, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.573628655476726, |
|
"grad_norm": 0.4327426254749298, |
|
"learning_rate": 9.176424361641965e-05, |
|
"loss": 0.9467, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.574493857068697, |
|
"grad_norm": 0.4926101565361023, |
|
"learning_rate": 9.146328950116839e-05, |
|
"loss": 0.9767, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.5753590586606679, |
|
"grad_norm": 0.4621298313140869, |
|
"learning_rate": 9.116241325342399e-05, |
|
"loss": 0.99, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.5762242602526388, |
|
"grad_norm": 0.46005651354789734, |
|
"learning_rate": 9.086161761762578e-05, |
|
"loss": 0.9796, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.5770894618446097, |
|
"grad_norm": 0.4343353807926178, |
|
"learning_rate": 9.056090533747762e-05, |
|
"loss": 0.9349, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.5779546634365808, |
|
"grad_norm": 0.4175094962120056, |
|
"learning_rate": 9.026027915592321e-05, |
|
"loss": 0.9857, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.5788198650285516, |
|
"grad_norm": 0.44060176610946655, |
|
"learning_rate": 8.995974181512072e-05, |
|
"loss": 0.9523, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.5796850666205225, |
|
"grad_norm": 0.4819500148296356, |
|
"learning_rate": 8.965929605641811e-05, |
|
"loss": 0.9838, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.5805502682124936, |
|
"grad_norm": 0.4309912919998169, |
|
"learning_rate": 8.935894462032798e-05, |
|
"loss": 0.9787, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.5814154698044645, |
|
"grad_norm": 0.440983384847641, |
|
"learning_rate": 8.905869024650242e-05, |
|
"loss": 0.9485, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.5822806713964354, |
|
"grad_norm": 0.4531494975090027, |
|
"learning_rate": 8.875853567370829e-05, |
|
"loss": 0.9819, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.5831458729884063, |
|
"grad_norm": 0.43064072728157043, |
|
"learning_rate": 8.845848363980207e-05, |
|
"loss": 0.9748, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.5840110745803773, |
|
"grad_norm": 0.46520137786865234, |
|
"learning_rate": 8.815853688170498e-05, |
|
"loss": 0.9634, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.5848762761723482, |
|
"grad_norm": 0.5070540904998779, |
|
"learning_rate": 8.785869813537789e-05, |
|
"loss": 1.011, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.5857414777643191, |
|
"grad_norm": 0.4707706868648529, |
|
"learning_rate": 8.75589701357965e-05, |
|
"loss": 0.9667, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.58660667935629, |
|
"grad_norm": 0.4585541784763336, |
|
"learning_rate": 8.725935561692637e-05, |
|
"loss": 0.9676, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.587471880948261, |
|
"grad_norm": 0.4634505808353424, |
|
"learning_rate": 8.695985731169781e-05, |
|
"loss": 0.9875, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.5883370825402319, |
|
"grad_norm": 0.4741441011428833, |
|
"learning_rate": 8.666047795198124e-05, |
|
"loss": 0.9787, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.5892022841322028, |
|
"grad_norm": 0.44273069500923157, |
|
"learning_rate": 8.636122026856201e-05, |
|
"loss": 0.9481, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.5900674857241738, |
|
"grad_norm": 0.45366448163986206, |
|
"learning_rate": 8.606208699111573e-05, |
|
"loss": 0.9577, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.5909326873161447, |
|
"grad_norm": 0.4686761498451233, |
|
"learning_rate": 8.576308084818305e-05, |
|
"loss": 0.9743, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.5917978889081156, |
|
"grad_norm": 0.4535995125770569, |
|
"learning_rate": 8.546420456714517e-05, |
|
"loss": 0.9928, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.5926630905000865, |
|
"grad_norm": 0.426045298576355, |
|
"learning_rate": 8.516546087419856e-05, |
|
"loss": 0.9829, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.5935282920920575, |
|
"grad_norm": 0.4387094974517822, |
|
"learning_rate": 8.486685249433048e-05, |
|
"loss": 0.9899, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.5943934936840284, |
|
"grad_norm": 0.4502699375152588, |
|
"learning_rate": 8.456838215129384e-05, |
|
"loss": 0.998, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.5952586952759993, |
|
"grad_norm": 0.4558119773864746, |
|
"learning_rate": 8.427005256758236e-05, |
|
"loss": 0.9493, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.5961238968679702, |
|
"grad_norm": 0.4319314658641815, |
|
"learning_rate": 8.397186646440603e-05, |
|
"loss": 0.9276, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.5969890984599412, |
|
"grad_norm": 0.4241703748703003, |
|
"learning_rate": 8.367382656166584e-05, |
|
"loss": 0.9368, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.5978543000519121, |
|
"grad_norm": 0.4475589394569397, |
|
"learning_rate": 8.33759355779295e-05, |
|
"loss": 0.9648, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.598719501643883, |
|
"grad_norm": 0.4357362389564514, |
|
"learning_rate": 8.307819623040606e-05, |
|
"loss": 0.9449, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.599584703235854, |
|
"grad_norm": 0.5170083045959473, |
|
"learning_rate": 8.278061123492162e-05, |
|
"loss": 0.9446, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.6004499048278249, |
|
"grad_norm": 0.45184993743896484, |
|
"learning_rate": 8.248318330589437e-05, |
|
"loss": 0.9494, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.6013151064197958, |
|
"grad_norm": 0.44678378105163574, |
|
"learning_rate": 8.218591515630962e-05, |
|
"loss": 0.951, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.6021803080117667, |
|
"grad_norm": 0.4784872531890869, |
|
"learning_rate": 8.188880949769552e-05, |
|
"loss": 1.0141, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.6030455096037377, |
|
"grad_norm": 0.4891086220741272, |
|
"learning_rate": 8.159186904009783e-05, |
|
"loss": 0.9438, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.6039107111957086, |
|
"grad_norm": 0.4449233412742615, |
|
"learning_rate": 8.129509649205558e-05, |
|
"loss": 0.9473, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.6047759127876795, |
|
"grad_norm": 0.4476933479309082, |
|
"learning_rate": 8.099849456057613e-05, |
|
"loss": 0.9215, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.6056411143796505, |
|
"grad_norm": 0.47453024983406067, |
|
"learning_rate": 8.070206595111061e-05, |
|
"loss": 0.9419, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.6065063159716214, |
|
"grad_norm": 0.4311204254627228, |
|
"learning_rate": 8.040581336752921e-05, |
|
"loss": 0.9707, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.6073715175635923, |
|
"grad_norm": 0.4637129008769989, |
|
"learning_rate": 8.010973951209642e-05, |
|
"loss": 0.994, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.6082367191555632, |
|
"grad_norm": 0.42446842789649963, |
|
"learning_rate": 7.981384708544658e-05, |
|
"loss": 0.9716, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.6091019207475342, |
|
"grad_norm": 0.4456198811531067, |
|
"learning_rate": 7.951813878655898e-05, |
|
"loss": 0.9435, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.6099671223395051, |
|
"grad_norm": 0.4548759162425995, |
|
"learning_rate": 7.922261731273359e-05, |
|
"loss": 0.9703, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.610832323931476, |
|
"grad_norm": 0.4350235164165497, |
|
"learning_rate": 7.892728535956601e-05, |
|
"loss": 0.957, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.6116975255234469, |
|
"grad_norm": 0.4410596191883087, |
|
"learning_rate": 7.863214562092336e-05, |
|
"loss": 0.9588, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.6125627271154179, |
|
"grad_norm": 0.46627217531204224, |
|
"learning_rate": 7.833720078891938e-05, |
|
"loss": 0.9848, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.6134279287073888, |
|
"grad_norm": 0.44155973196029663, |
|
"learning_rate": 7.804245355388994e-05, |
|
"loss": 0.9629, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.6142931302993597, |
|
"grad_norm": 0.4281848669052124, |
|
"learning_rate": 7.774790660436858e-05, |
|
"loss": 0.9505, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.6151583318913307, |
|
"grad_norm": 0.4397382140159607, |
|
"learning_rate": 7.745356262706189e-05, |
|
"loss": 0.9638, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.6160235334833016, |
|
"grad_norm": 0.44392621517181396, |
|
"learning_rate": 7.715942430682515e-05, |
|
"loss": 1.0033, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.6168887350752725, |
|
"grad_norm": 0.4472537338733673, |
|
"learning_rate": 7.686549432663757e-05, |
|
"loss": 0.9348, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.6177539366672434, |
|
"grad_norm": 0.4813332259654999, |
|
"learning_rate": 7.657177536757816e-05, |
|
"loss": 0.9431, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.6186191382592144, |
|
"grad_norm": 0.4196160137653351, |
|
"learning_rate": 7.627827010880108e-05, |
|
"loss": 0.9661, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.6194843398511853, |
|
"grad_norm": 0.43365371227264404, |
|
"learning_rate": 7.598498122751108e-05, |
|
"loss": 0.9446, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.6203495414431562, |
|
"grad_norm": 0.44143837690353394, |
|
"learning_rate": 7.569191139893944e-05, |
|
"loss": 0.9582, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.6212147430351271, |
|
"grad_norm": 0.455301970243454, |
|
"learning_rate": 7.539906329631911e-05, |
|
"loss": 0.9487, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.6220799446270981, |
|
"grad_norm": 0.4444476366043091, |
|
"learning_rate": 7.510643959086083e-05, |
|
"loss": 0.9818, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.622945146219069, |
|
"grad_norm": 0.4136192500591278, |
|
"learning_rate": 7.481404295172827e-05, |
|
"loss": 1.0015, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.62381034781104, |
|
"grad_norm": 0.42628371715545654, |
|
"learning_rate": 7.452187604601407e-05, |
|
"loss": 0.98, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.624675549403011, |
|
"grad_norm": 0.4436239004135132, |
|
"learning_rate": 7.422994153871534e-05, |
|
"loss": 0.9909, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.6255407509949819, |
|
"grad_norm": 0.4337378144264221, |
|
"learning_rate": 7.393824209270925e-05, |
|
"loss": 0.9714, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.6264059525869528, |
|
"grad_norm": 0.46607285737991333, |
|
"learning_rate": 7.364678036872903e-05, |
|
"loss": 0.9933, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.6272711541789237, |
|
"grad_norm": 0.4370154142379761, |
|
"learning_rate": 7.335555902533941e-05, |
|
"loss": 0.987, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.6281363557708947, |
|
"grad_norm": 0.44606542587280273, |
|
"learning_rate": 7.306458071891258e-05, |
|
"loss": 0.9482, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.6290015573628656, |
|
"grad_norm": 0.5535009503364563, |
|
"learning_rate": 7.277384810360375e-05, |
|
"loss": 0.9597, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.6298667589548365, |
|
"grad_norm": 0.5309751033782959, |
|
"learning_rate": 7.248336383132718e-05, |
|
"loss": 0.9586, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.6307319605468074, |
|
"grad_norm": 0.4356056749820709, |
|
"learning_rate": 7.219313055173186e-05, |
|
"loss": 0.9881, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.6315971621387784, |
|
"grad_norm": 0.4677325487136841, |
|
"learning_rate": 7.190315091217728e-05, |
|
"loss": 0.963, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.6324623637307493, |
|
"grad_norm": 0.4469841420650482, |
|
"learning_rate": 7.161342755770942e-05, |
|
"loss": 0.9331, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.6333275653227202, |
|
"grad_norm": 0.4805571138858795, |
|
"learning_rate": 7.132396313103651e-05, |
|
"loss": 0.9428, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.6341927669146912, |
|
"grad_norm": 0.43493425846099854, |
|
"learning_rate": 7.1034760272505e-05, |
|
"loss": 0.9357, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.6350579685066621, |
|
"grad_norm": 0.44341421127319336, |
|
"learning_rate": 7.074582162007546e-05, |
|
"loss": 0.9954, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.635923170098633, |
|
"grad_norm": 0.47012633085250854, |
|
"learning_rate": 7.045714980929845e-05, |
|
"loss": 0.9575, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.6367883716906039, |
|
"grad_norm": 0.4581446647644043, |
|
"learning_rate": 7.016874747329068e-05, |
|
"loss": 0.9845, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.6376535732825749, |
|
"grad_norm": 0.40438956022262573, |
|
"learning_rate": 6.988061724271065e-05, |
|
"loss": 0.9699, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.6385187748745458, |
|
"grad_norm": 0.4338352680206299, |
|
"learning_rate": 6.959276174573503e-05, |
|
"loss": 0.9328, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.6393839764665167, |
|
"grad_norm": 0.4316514730453491, |
|
"learning_rate": 6.930518360803438e-05, |
|
"loss": 0.9436, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.6402491780584876, |
|
"grad_norm": 0.4489031434059143, |
|
"learning_rate": 6.90178854527494e-05, |
|
"loss": 0.9399, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.6411143796504586, |
|
"grad_norm": 0.42906907200813293, |
|
"learning_rate": 6.873086990046691e-05, |
|
"loss": 0.9655, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.6419795812424295, |
|
"grad_norm": 0.42408278584480286, |
|
"learning_rate": 6.844413956919603e-05, |
|
"loss": 0.9566, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.6428447828344004, |
|
"grad_norm": 0.4122326076030731, |
|
"learning_rate": 6.815769707434411e-05, |
|
"loss": 0.9992, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.6437099844263714, |
|
"grad_norm": 0.4537835121154785, |
|
"learning_rate": 6.787154502869313e-05, |
|
"loss": 0.9384, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.6445751860183423, |
|
"grad_norm": 0.4500395953655243, |
|
"learning_rate": 6.758568604237568e-05, |
|
"loss": 0.9851, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.6454403876103132, |
|
"grad_norm": 0.43696606159210205, |
|
"learning_rate": 6.730012272285123e-05, |
|
"loss": 0.942, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.6463055892022841, |
|
"grad_norm": 0.42087531089782715, |
|
"learning_rate": 6.701485767488235e-05, |
|
"loss": 0.9386, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.6471707907942551, |
|
"grad_norm": 0.4692328870296478, |
|
"learning_rate": 6.672989350051082e-05, |
|
"loss": 0.9937, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.648035992386226, |
|
"grad_norm": 0.4377414584159851, |
|
"learning_rate": 6.644523279903419e-05, |
|
"loss": 0.953, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.6489011939781969, |
|
"grad_norm": 0.5129314661026001, |
|
"learning_rate": 6.61608781669817e-05, |
|
"loss": 0.9555, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.6497663955701678, |
|
"grad_norm": 0.452594131231308, |
|
"learning_rate": 6.587683219809089e-05, |
|
"loss": 0.9346, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.6506315971621388, |
|
"grad_norm": 0.4192207157611847, |
|
"learning_rate": 6.559309748328388e-05, |
|
"loss": 0.9461, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.6514967987541097, |
|
"grad_norm": 0.4243721067905426, |
|
"learning_rate": 6.530967661064348e-05, |
|
"loss": 0.9644, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.6523620003460806, |
|
"grad_norm": 0.4021894931793213, |
|
"learning_rate": 6.502657216539007e-05, |
|
"loss": 0.9608, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.6532272019380516, |
|
"grad_norm": 0.43916577100753784, |
|
"learning_rate": 6.474378672985749e-05, |
|
"loss": 0.9321, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.6540924035300225, |
|
"grad_norm": 0.4375644326210022, |
|
"learning_rate": 6.446132288346994e-05, |
|
"loss": 0.9507, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.6549576051219934, |
|
"grad_norm": 0.45128095149993896, |
|
"learning_rate": 6.417918320271807e-05, |
|
"loss": 0.9804, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.6558228067139643, |
|
"grad_norm": 0.45634618401527405, |
|
"learning_rate": 6.389737026113577e-05, |
|
"loss": 0.9579, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.6566880083059353, |
|
"grad_norm": 0.45252084732055664, |
|
"learning_rate": 6.36158866292766e-05, |
|
"loss": 0.9811, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.6575532098979062, |
|
"grad_norm": 0.4186381697654724, |
|
"learning_rate": 6.333473487469021e-05, |
|
"loss": 0.983, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.6584184114898771, |
|
"grad_norm": 0.43495872616767883, |
|
"learning_rate": 6.305391756189916e-05, |
|
"loss": 1.0123, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.659283613081848, |
|
"grad_norm": 0.42767274379730225, |
|
"learning_rate": 6.27734372523753e-05, |
|
"loss": 0.9882, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.660148814673819, |
|
"grad_norm": 0.44282782077789307, |
|
"learning_rate": 6.249329650451663e-05, |
|
"loss": 0.9771, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.6610140162657899, |
|
"grad_norm": 0.4403495490550995, |
|
"learning_rate": 6.221349787362366e-05, |
|
"loss": 1.0058, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.6618792178577608, |
|
"grad_norm": 0.4575168490409851, |
|
"learning_rate": 6.193404391187647e-05, |
|
"loss": 0.9948, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.6627444194497318, |
|
"grad_norm": 0.42425480484962463, |
|
"learning_rate": 6.165493716831119e-05, |
|
"loss": 0.9357, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.6636096210417027, |
|
"grad_norm": 0.4653586447238922, |
|
"learning_rate": 6.137618018879671e-05, |
|
"loss": 0.973, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.6644748226336736, |
|
"grad_norm": 0.4468865692615509, |
|
"learning_rate": 6.109777551601165e-05, |
|
"loss": 0.9675, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.6653400242256445, |
|
"grad_norm": 0.4487997889518738, |
|
"learning_rate": 6.0819725689421046e-05, |
|
"loss": 0.9482, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.6662052258176155, |
|
"grad_norm": 0.4328047037124634, |
|
"learning_rate": 6.054203324525321e-05, |
|
"loss": 0.9853, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.6670704274095864, |
|
"grad_norm": 0.4487217664718628, |
|
"learning_rate": 6.026470071647656e-05, |
|
"loss": 0.9879, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.6679356290015573, |
|
"grad_norm": 0.5115017294883728, |
|
"learning_rate": 5.9987730632776584e-05, |
|
"loss": 0.9804, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.6688008305935282, |
|
"grad_norm": 0.494248628616333, |
|
"learning_rate": 5.9711125520532754e-05, |
|
"loss": 0.9657, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.6696660321854992, |
|
"grad_norm": 0.4214867949485779, |
|
"learning_rate": 5.943488790279539e-05, |
|
"loss": 0.9687, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.6705312337774701, |
|
"grad_norm": 0.4864426851272583, |
|
"learning_rate": 5.9159020299262793e-05, |
|
"loss": 0.9391, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.671396435369441, |
|
"grad_norm": 0.4803209602832794, |
|
"learning_rate": 5.888352522625809e-05, |
|
"loss": 0.9195, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.6722616369614121, |
|
"grad_norm": 0.42843303084373474, |
|
"learning_rate": 5.860840519670648e-05, |
|
"loss": 0.9757, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.673126838553383, |
|
"grad_norm": 0.4331001341342926, |
|
"learning_rate": 5.833366272011212e-05, |
|
"loss": 0.9961, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.6739920401453539, |
|
"grad_norm": 0.43525782227516174, |
|
"learning_rate": 5.805930030253539e-05, |
|
"loss": 0.9764, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.6748572417373248, |
|
"grad_norm": 0.4345535933971405, |
|
"learning_rate": 5.7785320446569924e-05, |
|
"loss": 0.9753, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.6757224433292958, |
|
"grad_norm": 0.44036418199539185, |
|
"learning_rate": 5.7511725651319834e-05, |
|
"loss": 0.9439, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.6765876449212667, |
|
"grad_norm": 0.42969611287117004, |
|
"learning_rate": 5.7238518412376906e-05, |
|
"loss": 0.9827, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.6774528465132376, |
|
"grad_norm": 0.42602795362472534, |
|
"learning_rate": 5.696570122179783e-05, |
|
"loss": 0.9341, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.6783180481052085, |
|
"grad_norm": 0.4188039302825928, |
|
"learning_rate": 5.66932765680815e-05, |
|
"loss": 0.955, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.6791832496971795, |
|
"grad_norm": 0.49803611636161804, |
|
"learning_rate": 5.6421246936146255e-05, |
|
"loss": 0.9364, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.6800484512891504, |
|
"grad_norm": 0.46372202038764954, |
|
"learning_rate": 5.614961480730727e-05, |
|
"loss": 0.9426, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.6809136528811213, |
|
"grad_norm": 0.4493297040462494, |
|
"learning_rate": 5.587838265925389e-05, |
|
"loss": 0.9297, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.6817788544730923, |
|
"grad_norm": 0.43697822093963623, |
|
"learning_rate": 5.5607552966027044e-05, |
|
"loss": 0.9581, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.6826440560650632, |
|
"grad_norm": 0.44038650393486023, |
|
"learning_rate": 5.533712819799667e-05, |
|
"loss": 1.0135, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.6835092576570341, |
|
"grad_norm": 0.44993671774864197, |
|
"learning_rate": 5.5067110821839186e-05, |
|
"loss": 0.9798, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.684374459249005, |
|
"grad_norm": 0.4506613314151764, |
|
"learning_rate": 5.4797503300515e-05, |
|
"loss": 0.9571, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.685239660840976, |
|
"grad_norm": 0.4558075964450836, |
|
"learning_rate": 5.452830809324601e-05, |
|
"loss": 0.9611, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.6861048624329469, |
|
"grad_norm": 0.4847477376461029, |
|
"learning_rate": 5.425952765549319e-05, |
|
"loss": 1.0172, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.6869700640249178, |
|
"grad_norm": 0.4571908414363861, |
|
"learning_rate": 5.399116443893433e-05, |
|
"loss": 0.9883, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.6878352656168887, |
|
"grad_norm": 0.41807690262794495, |
|
"learning_rate": 5.3723220891441274e-05, |
|
"loss": 1.0335, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.6887004672088597, |
|
"grad_norm": 0.4681232273578644, |
|
"learning_rate": 5.345569945705817e-05, |
|
"loss": 0.9219, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.6895656688008306, |
|
"grad_norm": 0.4585653841495514, |
|
"learning_rate": 5.3188602575978616e-05, |
|
"loss": 1.0058, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.6904308703928015, |
|
"grad_norm": 0.41353005170822144, |
|
"learning_rate": 5.292193268452377e-05, |
|
"loss": 0.9544, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.6912960719847725, |
|
"grad_norm": 0.4379875361919403, |
|
"learning_rate": 5.265569221511999e-05, |
|
"loss": 0.9619, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.6921612735767434, |
|
"grad_norm": 0.45931750535964966, |
|
"learning_rate": 5.238988359627662e-05, |
|
"loss": 0.9852, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.6930264751687143, |
|
"grad_norm": 0.46481239795684814, |
|
"learning_rate": 5.212450925256406e-05, |
|
"loss": 0.9666, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.6938916767606852, |
|
"grad_norm": 0.4746643304824829, |
|
"learning_rate": 5.185957160459119e-05, |
|
"loss": 0.9852, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.6947568783526562, |
|
"grad_norm": 0.4331592321395874, |
|
"learning_rate": 5.159507306898386e-05, |
|
"loss": 0.9581, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.6956220799446271, |
|
"grad_norm": 0.44027334451675415, |
|
"learning_rate": 5.133101605836228e-05, |
|
"loss": 0.9949, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.696487281536598, |
|
"grad_norm": 0.42148056626319885, |
|
"learning_rate": 5.106740298131961e-05, |
|
"loss": 0.961, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.697352483128569, |
|
"grad_norm": 0.41860854625701904, |
|
"learning_rate": 5.080423624239936e-05, |
|
"loss": 0.9602, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.6982176847205399, |
|
"grad_norm": 0.4259825348854065, |
|
"learning_rate": 5.054151824207405e-05, |
|
"loss": 0.9516, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.6990828863125108, |
|
"grad_norm": 0.44930359721183777, |
|
"learning_rate": 5.027925137672278e-05, |
|
"loss": 0.9646, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.6999480879044817, |
|
"grad_norm": 0.45277413725852966, |
|
"learning_rate": 5.0017438038609846e-05, |
|
"loss": 0.9787, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.7008132894964527, |
|
"grad_norm": 0.45245739817619324, |
|
"learning_rate": 4.975608061586258e-05, |
|
"loss": 0.9823, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.7016784910884236, |
|
"grad_norm": 0.44991815090179443, |
|
"learning_rate": 4.9495181492449724e-05, |
|
"loss": 0.9416, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.7025436926803945, |
|
"grad_norm": 0.42899975180625916, |
|
"learning_rate": 4.923474304815967e-05, |
|
"loss": 0.987, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.7034088942723654, |
|
"grad_norm": 0.4850844442844391, |
|
"learning_rate": 4.897476765857858e-05, |
|
"loss": 0.9856, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.7042740958643364, |
|
"grad_norm": 0.42250022292137146, |
|
"learning_rate": 4.871525769506916e-05, |
|
"loss": 0.9327, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.7051392974563073, |
|
"grad_norm": 0.4618523120880127, |
|
"learning_rate": 4.8456215524748396e-05, |
|
"loss": 1.017, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.7060044990482782, |
|
"grad_norm": 0.42984339594841003, |
|
"learning_rate": 4.819764351046659e-05, |
|
"loss": 0.9534, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.7068697006402492, |
|
"grad_norm": 0.43350252509117126, |
|
"learning_rate": 4.7939544010785377e-05, |
|
"loss": 0.9516, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.7077349022322201, |
|
"grad_norm": 0.4169905185699463, |
|
"learning_rate": 4.768191937995636e-05, |
|
"loss": 0.9477, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.708600103824191, |
|
"grad_norm": 0.40108850598335266, |
|
"learning_rate": 4.74247719678997e-05, |
|
"loss": 0.9751, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.7094653054161619, |
|
"grad_norm": 0.4502684772014618, |
|
"learning_rate": 4.716810412018251e-05, |
|
"loss": 0.9778, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.7103305070081329, |
|
"grad_norm": 0.4119910001754761, |
|
"learning_rate": 4.691191817799765e-05, |
|
"loss": 0.9507, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.7111957086001038, |
|
"grad_norm": 0.43756359815597534, |
|
"learning_rate": 4.665621647814226e-05, |
|
"loss": 0.9897, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.7120609101920747, |
|
"grad_norm": 0.44212594628334045, |
|
"learning_rate": 4.640100135299644e-05, |
|
"loss": 0.9555, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.7129261117840456, |
|
"grad_norm": 0.464538037776947, |
|
"learning_rate": 4.614627513050205e-05, |
|
"loss": 0.9533, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.7137913133760166, |
|
"grad_norm": 0.42958030104637146, |
|
"learning_rate": 4.589204013414139e-05, |
|
"loss": 1.0032, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.7146565149679875, |
|
"grad_norm": 0.45080655813217163, |
|
"learning_rate": 4.563829868291607e-05, |
|
"loss": 0.9501, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.7155217165599584, |
|
"grad_norm": 0.4486967623233795, |
|
"learning_rate": 4.538505309132584e-05, |
|
"loss": 0.9779, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.7163869181519295, |
|
"grad_norm": 0.4742283821105957, |
|
"learning_rate": 4.513230566934743e-05, |
|
"loss": 0.9393, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.7172521197439004, |
|
"grad_norm": 0.4522545635700226, |
|
"learning_rate": 4.4880058722413566e-05, |
|
"loss": 0.9623, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.7181173213358713, |
|
"grad_norm": 0.4180852472782135, |
|
"learning_rate": 4.462831455139185e-05, |
|
"loss": 0.9544, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.7189825229278421, |
|
"grad_norm": 0.4502962827682495, |
|
"learning_rate": 4.4377075452563955e-05, |
|
"loss": 0.9749, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.7198477245198132, |
|
"grad_norm": 0.42445746064186096, |
|
"learning_rate": 4.412634371760429e-05, |
|
"loss": 0.9975, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.7207129261117841, |
|
"grad_norm": 0.4130094051361084, |
|
"learning_rate": 4.387612163355966e-05, |
|
"loss": 1.0015, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.721578127703755, |
|
"grad_norm": 0.5205526947975159, |
|
"learning_rate": 4.362641148282781e-05, |
|
"loss": 0.9721, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.7224433292957259, |
|
"grad_norm": 0.4239148199558258, |
|
"learning_rate": 4.337721554313705e-05, |
|
"loss": 0.9012, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.7233085308876969, |
|
"grad_norm": 0.4548054337501526, |
|
"learning_rate": 4.312853608752531e-05, |
|
"loss": 0.9368, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.7241737324796678, |
|
"grad_norm": 0.4600510597229004, |
|
"learning_rate": 4.288037538431933e-05, |
|
"loss": 0.9614, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.7250389340716387, |
|
"grad_norm": 0.4393291175365448, |
|
"learning_rate": 4.2632735697114226e-05, |
|
"loss": 0.9403, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.7259041356636097, |
|
"grad_norm": 0.4278406798839569, |
|
"learning_rate": 4.238561928475244e-05, |
|
"loss": 0.9459, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.7267693372555806, |
|
"grad_norm": 0.43765777349472046, |
|
"learning_rate": 4.2139028401303625e-05, |
|
"loss": 0.9891, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.7276345388475515, |
|
"grad_norm": 0.4184790551662445, |
|
"learning_rate": 4.189296529604355e-05, |
|
"loss": 0.9551, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.7284997404395224, |
|
"grad_norm": 0.4558292031288147, |
|
"learning_rate": 4.164743221343417e-05, |
|
"loss": 0.9709, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.7293649420314934, |
|
"grad_norm": 0.4263213276863098, |
|
"learning_rate": 4.1402431393102516e-05, |
|
"loss": 0.967, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.7302301436234643, |
|
"grad_norm": 0.44499507546424866, |
|
"learning_rate": 4.115796506982087e-05, |
|
"loss": 0.9841, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.7310953452154352, |
|
"grad_norm": 0.46089598536491394, |
|
"learning_rate": 4.091403547348597e-05, |
|
"loss": 1.0149, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.7319605468074061, |
|
"grad_norm": 0.44323694705963135, |
|
"learning_rate": 4.0670644829098824e-05, |
|
"loss": 0.945, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.7328257483993771, |
|
"grad_norm": 0.4892789125442505, |
|
"learning_rate": 4.0427795356744445e-05, |
|
"loss": 0.9208, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.733690949991348, |
|
"grad_norm": 0.43195831775665283, |
|
"learning_rate": 4.018548927157141e-05, |
|
"loss": 0.948, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.7345561515833189, |
|
"grad_norm": 0.4387545585632324, |
|
"learning_rate": 3.994372878377202e-05, |
|
"loss": 1.029, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.7354213531752899, |
|
"grad_norm": 0.4202425479888916, |
|
"learning_rate": 3.970251609856166e-05, |
|
"loss": 0.9322, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.7362865547672608, |
|
"grad_norm": 0.4669027030467987, |
|
"learning_rate": 3.946185341615918e-05, |
|
"loss": 0.9433, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.7371517563592317, |
|
"grad_norm": 0.426996648311615, |
|
"learning_rate": 3.9221742931766436e-05, |
|
"loss": 0.9624, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.7380169579512026, |
|
"grad_norm": 0.4329177439212799, |
|
"learning_rate": 3.898218683554845e-05, |
|
"loss": 0.9852, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.7388821595431736, |
|
"grad_norm": 0.40790703892707825, |
|
"learning_rate": 3.874318731261343e-05, |
|
"loss": 0.9441, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.7397473611351445, |
|
"grad_norm": 0.43579617142677307, |
|
"learning_rate": 3.8504746542992765e-05, |
|
"loss": 0.9446, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.7406125627271154, |
|
"grad_norm": 0.4374306797981262, |
|
"learning_rate": 3.826686670162119e-05, |
|
"loss": 0.932, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.7414777643190863, |
|
"grad_norm": 0.43435388803482056, |
|
"learning_rate": 3.802954995831699e-05, |
|
"loss": 0.9898, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.7423429659110573, |
|
"grad_norm": 0.4677979350090027, |
|
"learning_rate": 3.779279847776208e-05, |
|
"loss": 0.9797, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.7432081675030282, |
|
"grad_norm": 0.43926307559013367, |
|
"learning_rate": 3.7556614419482385e-05, |
|
"loss": 0.9666, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.7440733690949991, |
|
"grad_norm": 0.4474581778049469, |
|
"learning_rate": 3.7320999937828084e-05, |
|
"loss": 0.9616, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.7449385706869701, |
|
"grad_norm": 0.4644235074520111, |
|
"learning_rate": 3.7085957181953966e-05, |
|
"loss": 1.0063, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 0.745803772278941, |
|
"grad_norm": 0.4108608067035675, |
|
"learning_rate": 3.685148829579984e-05, |
|
"loss": 0.9367, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.7466689738709119, |
|
"grad_norm": 0.44119784235954285, |
|
"learning_rate": 3.661759541807096e-05, |
|
"loss": 0.9638, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 0.7475341754628828, |
|
"grad_norm": 0.4548141360282898, |
|
"learning_rate": 3.638428068221853e-05, |
|
"loss": 0.9699, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.7483993770548538, |
|
"grad_norm": 0.4526442289352417, |
|
"learning_rate": 3.615154621642023e-05, |
|
"loss": 0.9521, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.7492645786468247, |
|
"grad_norm": 0.5478776693344116, |
|
"learning_rate": 3.591939414356081e-05, |
|
"loss": 0.9538, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.7501297802387956, |
|
"grad_norm": 0.423746258020401, |
|
"learning_rate": 3.568782658121275e-05, |
|
"loss": 0.9548, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 0.7509949818307665, |
|
"grad_norm": 0.44348010420799255, |
|
"learning_rate": 3.545684564161697e-05, |
|
"loss": 0.9325, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.7518601834227375, |
|
"grad_norm": 0.4436131417751312, |
|
"learning_rate": 3.522645343166336e-05, |
|
"loss": 0.9738, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 0.7527253850147084, |
|
"grad_norm": 0.4240371882915497, |
|
"learning_rate": 3.499665205287189e-05, |
|
"loss": 0.9721, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.7535905866066793, |
|
"grad_norm": 0.4415445327758789, |
|
"learning_rate": 3.476744360137317e-05, |
|
"loss": 0.9401, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 0.7544557881986503, |
|
"grad_norm": 0.43800264596939087, |
|
"learning_rate": 3.453883016788948e-05, |
|
"loss": 0.9331, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.7553209897906212, |
|
"grad_norm": 0.4613566994667053, |
|
"learning_rate": 3.4310813837715625e-05, |
|
"loss": 0.9906, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 0.7561861913825921, |
|
"grad_norm": 0.43836039304733276, |
|
"learning_rate": 3.408339669069991e-05, |
|
"loss": 0.9468, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.757051392974563, |
|
"grad_norm": 0.43051472306251526, |
|
"learning_rate": 3.3856580801225327e-05, |
|
"loss": 0.9338, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.757916594566534, |
|
"grad_norm": 0.445026159286499, |
|
"learning_rate": 3.363036823819029e-05, |
|
"loss": 0.9629, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.7587817961585049, |
|
"grad_norm": 0.4377189874649048, |
|
"learning_rate": 3.340476106499023e-05, |
|
"loss": 0.9837, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 0.7596469977504758, |
|
"grad_norm": 0.43655675649642944, |
|
"learning_rate": 3.317976133949825e-05, |
|
"loss": 0.9978, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.7605121993424467, |
|
"grad_norm": 0.44946399331092834, |
|
"learning_rate": 3.2955371114046895e-05, |
|
"loss": 0.9705, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 0.7613774009344177, |
|
"grad_norm": 0.3919507563114166, |
|
"learning_rate": 3.2731592435408906e-05, |
|
"loss": 0.9462, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.7622426025263886, |
|
"grad_norm": 0.4803646206855774, |
|
"learning_rate": 3.250842734477904e-05, |
|
"loss": 0.953, |
|
"step": 4405 |
|
}, |
|
{ |
|
"epoch": 0.7631078041183595, |
|
"grad_norm": 0.4366481304168701, |
|
"learning_rate": 3.22858778777551e-05, |
|
"loss": 0.9663, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.7639730057103306, |
|
"grad_norm": 0.4419713616371155, |
|
"learning_rate": 3.206394606431941e-05, |
|
"loss": 0.9984, |
|
"step": 4415 |
|
}, |
|
{ |
|
"epoch": 0.7648382073023015, |
|
"grad_norm": 0.44880411028862, |
|
"learning_rate": 3.1842633928820574e-05, |
|
"loss": 0.9361, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.7657034088942724, |
|
"grad_norm": 0.4589973986148834, |
|
"learning_rate": 3.162194348995459e-05, |
|
"loss": 0.9589, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 0.7665686104862433, |
|
"grad_norm": 0.46531566977500916, |
|
"learning_rate": 3.1401876760746876e-05, |
|
"loss": 0.9758, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.7674338120782143, |
|
"grad_norm": 0.4017270505428314, |
|
"learning_rate": 3.1182435748533455e-05, |
|
"loss": 0.9565, |
|
"step": 4435 |
|
}, |
|
{ |
|
"epoch": 0.7682990136701852, |
|
"grad_norm": 0.4428539574146271, |
|
"learning_rate": 3.096362245494311e-05, |
|
"loss": 0.9959, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.7691642152621561, |
|
"grad_norm": 0.44016364216804504, |
|
"learning_rate": 3.074543887587874e-05, |
|
"loss": 0.9669, |
|
"step": 4445 |
|
}, |
|
{ |
|
"epoch": 0.770029416854127, |
|
"grad_norm": 0.454472154378891, |
|
"learning_rate": 3.0527887001499364e-05, |
|
"loss": 0.947, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.770894618446098, |
|
"grad_norm": 0.4053809344768524, |
|
"learning_rate": 3.0310968816201925e-05, |
|
"loss": 0.9217, |
|
"step": 4455 |
|
}, |
|
{ |
|
"epoch": 0.7717598200380689, |
|
"grad_norm": 0.43864405155181885, |
|
"learning_rate": 3.0094686298603136e-05, |
|
"loss": 0.9562, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.7726250216300398, |
|
"grad_norm": 0.4394964277744293, |
|
"learning_rate": 2.9879041421521527e-05, |
|
"loss": 0.9766, |
|
"step": 4465 |
|
}, |
|
{ |
|
"epoch": 0.7734902232220108, |
|
"grad_norm": 0.44996631145477295, |
|
"learning_rate": 2.966403615195934e-05, |
|
"loss": 0.9347, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.7743554248139817, |
|
"grad_norm": 0.48114338517189026, |
|
"learning_rate": 2.944967245108471e-05, |
|
"loss": 0.9749, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 0.7752206264059526, |
|
"grad_norm": 0.45745283365249634, |
|
"learning_rate": 2.923595227421364e-05, |
|
"loss": 0.9799, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.7760858279979235, |
|
"grad_norm": 0.4538094103336334, |
|
"learning_rate": 2.9022877570792305e-05, |
|
"loss": 0.9462, |
|
"step": 4485 |
|
}, |
|
{ |
|
"epoch": 0.7769510295898945, |
|
"grad_norm": 0.4058148264884949, |
|
"learning_rate": 2.8810450284379153e-05, |
|
"loss": 0.9927, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.7778162311818654, |
|
"grad_norm": 0.435544490814209, |
|
"learning_rate": 2.8598672352627244e-05, |
|
"loss": 0.9089, |
|
"step": 4495 |
|
}, |
|
{ |
|
"epoch": 0.7786814327738363, |
|
"grad_norm": 0.4508376717567444, |
|
"learning_rate": 2.838754570726655e-05, |
|
"loss": 0.9786, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.7795466343658072, |
|
"grad_norm": 0.44496816396713257, |
|
"learning_rate": 2.8177072274086348e-05, |
|
"loss": 0.9692, |
|
"step": 4505 |
|
}, |
|
{ |
|
"epoch": 0.7804118359577782, |
|
"grad_norm": 0.5042269229888916, |
|
"learning_rate": 2.7967253972917605e-05, |
|
"loss": 0.9742, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.7812770375497491, |
|
"grad_norm": 0.44786500930786133, |
|
"learning_rate": 2.7758092717615657e-05, |
|
"loss": 0.9582, |
|
"step": 4515 |
|
}, |
|
{ |
|
"epoch": 0.78214223914172, |
|
"grad_norm": 0.4112664461135864, |
|
"learning_rate": 2.754959041604239e-05, |
|
"loss": 1.0003, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.783007440733691, |
|
"grad_norm": 0.4549863636493683, |
|
"learning_rate": 2.7341748970049196e-05, |
|
"loss": 0.9559, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 0.7838726423256619, |
|
"grad_norm": 0.42742258310317993, |
|
"learning_rate": 2.7134570275459435e-05, |
|
"loss": 0.9488, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.7847378439176328, |
|
"grad_norm": 0.43447646498680115, |
|
"learning_rate": 2.6928056222051202e-05, |
|
"loss": 0.9612, |
|
"step": 4535 |
|
}, |
|
{ |
|
"epoch": 0.7856030455096037, |
|
"grad_norm": 0.44213923811912537, |
|
"learning_rate": 2.6722208693540052e-05, |
|
"loss": 0.9417, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.7864682471015747, |
|
"grad_norm": 0.4459418058395386, |
|
"learning_rate": 2.6517029567561836e-05, |
|
"loss": 1.0021, |
|
"step": 4545 |
|
}, |
|
{ |
|
"epoch": 0.7873334486935456, |
|
"grad_norm": 0.4390890598297119, |
|
"learning_rate": 2.6312520715655685e-05, |
|
"loss": 0.9761, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.7881986502855165, |
|
"grad_norm": 0.47070950269699097, |
|
"learning_rate": 2.610868400324663e-05, |
|
"loss": 0.956, |
|
"step": 4555 |
|
}, |
|
{ |
|
"epoch": 0.7890638518774875, |
|
"grad_norm": 0.4373131990432739, |
|
"learning_rate": 2.590552128962902e-05, |
|
"loss": 0.9721, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.7899290534694584, |
|
"grad_norm": 0.42470088601112366, |
|
"learning_rate": 2.5703034427949103e-05, |
|
"loss": 0.9514, |
|
"step": 4565 |
|
}, |
|
{ |
|
"epoch": 0.7907942550614293, |
|
"grad_norm": 0.45303651690483093, |
|
"learning_rate": 2.5501225265188578e-05, |
|
"loss": 1.0286, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.7916594566534002, |
|
"grad_norm": 0.4624740779399872, |
|
"learning_rate": 2.5300095642147293e-05, |
|
"loss": 0.9757, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 0.7925246582453712, |
|
"grad_norm": 0.4534415900707245, |
|
"learning_rate": 2.5099647393426883e-05, |
|
"loss": 1.0163, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.7933898598373421, |
|
"grad_norm": 0.43228623270988464, |
|
"learning_rate": 2.489988234741376e-05, |
|
"loss": 0.928, |
|
"step": 4585 |
|
}, |
|
{ |
|
"epoch": 0.794255061429313, |
|
"grad_norm": 0.4066708981990814, |
|
"learning_rate": 2.4700802326262405e-05, |
|
"loss": 0.9341, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.7951202630212839, |
|
"grad_norm": 0.45343056321144104, |
|
"learning_rate": 2.450240914587907e-05, |
|
"loss": 0.9669, |
|
"step": 4595 |
|
}, |
|
{ |
|
"epoch": 0.7959854646132549, |
|
"grad_norm": 0.43527576327323914, |
|
"learning_rate": 2.430470461590475e-05, |
|
"loss": 0.9616, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.7968506662052258, |
|
"grad_norm": 0.44016727805137634, |
|
"learning_rate": 2.4107690539699157e-05, |
|
"loss": 0.9827, |
|
"step": 4605 |
|
}, |
|
{ |
|
"epoch": 0.7977158677971967, |
|
"grad_norm": 0.4747732877731323, |
|
"learning_rate": 2.3911368714323835e-05, |
|
"loss": 0.955, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.7985810693891677, |
|
"grad_norm": 0.41703560948371887, |
|
"learning_rate": 2.371574093052612e-05, |
|
"loss": 0.9736, |
|
"step": 4615 |
|
}, |
|
{ |
|
"epoch": 0.7994462709811386, |
|
"grad_norm": 0.40136978030204773, |
|
"learning_rate": 2.3520808972722596e-05, |
|
"loss": 0.9487, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.8003114725731095, |
|
"grad_norm": 0.46093153953552246, |
|
"learning_rate": 2.3326574618982865e-05, |
|
"loss": 0.9431, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 0.8011766741650804, |
|
"grad_norm": 0.459598183631897, |
|
"learning_rate": 2.3133039641013366e-05, |
|
"loss": 0.9502, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.8020418757570514, |
|
"grad_norm": 0.4129520058631897, |
|
"learning_rate": 2.294020580414118e-05, |
|
"loss": 0.9672, |
|
"step": 4635 |
|
}, |
|
{ |
|
"epoch": 0.8029070773490223, |
|
"grad_norm": 0.460458904504776, |
|
"learning_rate": 2.2748074867297918e-05, |
|
"loss": 1.0069, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.8037722789409932, |
|
"grad_norm": 0.45732662081718445, |
|
"learning_rate": 2.2556648583003746e-05, |
|
"loss": 1.0105, |
|
"step": 4645 |
|
}, |
|
{ |
|
"epoch": 0.8046374805329641, |
|
"grad_norm": 0.42087164521217346, |
|
"learning_rate": 2.2365928697351314e-05, |
|
"loss": 0.9292, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.8055026821249351, |
|
"grad_norm": 0.4139239490032196, |
|
"learning_rate": 2.217591694998985e-05, |
|
"loss": 0.9789, |
|
"step": 4655 |
|
}, |
|
{ |
|
"epoch": 0.806367883716906, |
|
"grad_norm": 0.4530125856399536, |
|
"learning_rate": 2.198661507410935e-05, |
|
"loss": 0.9588, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.8072330853088769, |
|
"grad_norm": 0.4521411955356598, |
|
"learning_rate": 2.17980247964247e-05, |
|
"loss": 0.9694, |
|
"step": 4665 |
|
}, |
|
{ |
|
"epoch": 0.808098286900848, |
|
"grad_norm": 0.41040509939193726, |
|
"learning_rate": 2.1610147837159955e-05, |
|
"loss": 0.9449, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.8089634884928188, |
|
"grad_norm": 0.45691537857055664, |
|
"learning_rate": 2.1422985910032632e-05, |
|
"loss": 0.9519, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 0.8098286900847897, |
|
"grad_norm": 0.4356061518192291, |
|
"learning_rate": 2.1236540722238118e-05, |
|
"loss": 0.9641, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.8106938916767606, |
|
"grad_norm": 0.44307219982147217, |
|
"learning_rate": 2.105081397443406e-05, |
|
"loss": 0.9275, |
|
"step": 4685 |
|
}, |
|
{ |
|
"epoch": 0.8115590932687317, |
|
"grad_norm": 0.44193556904792786, |
|
"learning_rate": 2.0865807360724832e-05, |
|
"loss": 0.9502, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.8124242948607026, |
|
"grad_norm": 0.4376203119754791, |
|
"learning_rate": 2.0681522568646162e-05, |
|
"loss": 0.9811, |
|
"step": 4695 |
|
}, |
|
{ |
|
"epoch": 0.8132894964526735, |
|
"grad_norm": 0.4987525939941406, |
|
"learning_rate": 2.0497961279149658e-05, |
|
"loss": 0.9999, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.8141546980446444, |
|
"grad_norm": 0.5082269310951233, |
|
"learning_rate": 2.031512516658751e-05, |
|
"loss": 0.9405, |
|
"step": 4705 |
|
}, |
|
{ |
|
"epoch": 0.8150198996366154, |
|
"grad_norm": 0.46282055974006653, |
|
"learning_rate": 2.0133015898697237e-05, |
|
"loss": 0.9807, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.8158851012285863, |
|
"grad_norm": 0.4175664484500885, |
|
"learning_rate": 1.9951635136586434e-05, |
|
"loss": 0.9495, |
|
"step": 4715 |
|
}, |
|
{ |
|
"epoch": 0.8167503028205572, |
|
"grad_norm": 0.4660712778568268, |
|
"learning_rate": 1.977098453471764e-05, |
|
"loss": 0.9512, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.8176155044125282, |
|
"grad_norm": 0.43538960814476013, |
|
"learning_rate": 1.9591065740893245e-05, |
|
"loss": 0.9572, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 0.8184807060044991, |
|
"grad_norm": 0.46293970942497253, |
|
"learning_rate": 1.9411880396240435e-05, |
|
"loss": 0.9913, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.81934590759647, |
|
"grad_norm": 0.44890257716178894, |
|
"learning_rate": 1.9233430135196362e-05, |
|
"loss": 0.9729, |
|
"step": 4735 |
|
}, |
|
{ |
|
"epoch": 0.8202111091884409, |
|
"grad_norm": 0.49698060750961304, |
|
"learning_rate": 1.9055716585492922e-05, |
|
"loss": 1.0063, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.8210763107804119, |
|
"grad_norm": 0.47737550735473633, |
|
"learning_rate": 1.887874136814234e-05, |
|
"loss": 0.988, |
|
"step": 4745 |
|
}, |
|
{ |
|
"epoch": 0.8219415123723828, |
|
"grad_norm": 0.4616323709487915, |
|
"learning_rate": 1.8702506097421914e-05, |
|
"loss": 0.972, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.8228067139643537, |
|
"grad_norm": 0.41429662704467773, |
|
"learning_rate": 1.8527012380859756e-05, |
|
"loss": 0.9651, |
|
"step": 4755 |
|
}, |
|
{ |
|
"epoch": 0.8236719155563246, |
|
"grad_norm": 0.47355395555496216, |
|
"learning_rate": 1.8352261819219672e-05, |
|
"loss": 0.957, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.8245371171482956, |
|
"grad_norm": 0.4533310830593109, |
|
"learning_rate": 1.8178256006486993e-05, |
|
"loss": 1.0007, |
|
"step": 4765 |
|
}, |
|
{ |
|
"epoch": 0.8254023187402665, |
|
"grad_norm": 0.4279444217681885, |
|
"learning_rate": 1.8004996529853756e-05, |
|
"loss": 0.9577, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.8262675203322374, |
|
"grad_norm": 0.45256754755973816, |
|
"learning_rate": 1.7832484969704188e-05, |
|
"loss": 1.0217, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 0.8271327219242084, |
|
"grad_norm": 0.46098792552948, |
|
"learning_rate": 1.7660722899600624e-05, |
|
"loss": 0.9831, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.8279979235161793, |
|
"grad_norm": 0.4408135712146759, |
|
"learning_rate": 1.7489711886268712e-05, |
|
"loss": 0.9692, |
|
"step": 4785 |
|
}, |
|
{ |
|
"epoch": 0.8288631251081502, |
|
"grad_norm": 0.4415886700153351, |
|
"learning_rate": 1.731945348958356e-05, |
|
"loss": 0.9818, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.8297283267001211, |
|
"grad_norm": 0.4410308301448822, |
|
"learning_rate": 1.7149949262555054e-05, |
|
"loss": 1.0049, |
|
"step": 4795 |
|
}, |
|
{ |
|
"epoch": 0.8305935282920921, |
|
"grad_norm": 0.4444870352745056, |
|
"learning_rate": 1.6981200751314165e-05, |
|
"loss": 0.9561, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.831458729884063, |
|
"grad_norm": 0.4517926871776581, |
|
"learning_rate": 1.6813209495098447e-05, |
|
"loss": 0.9454, |
|
"step": 4805 |
|
}, |
|
{ |
|
"epoch": 0.8323239314760339, |
|
"grad_norm": 0.4491353929042816, |
|
"learning_rate": 1.6645977026238235e-05, |
|
"loss": 0.9803, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.8331891330680048, |
|
"grad_norm": 0.42471835017204285, |
|
"learning_rate": 1.647950487014257e-05, |
|
"loss": 0.938, |
|
"step": 4815 |
|
}, |
|
{ |
|
"epoch": 0.8340543346599758, |
|
"grad_norm": 0.4573848247528076, |
|
"learning_rate": 1.631379454528531e-05, |
|
"loss": 0.906, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.8349195362519467, |
|
"grad_norm": 0.48127248883247375, |
|
"learning_rate": 1.6148847563191294e-05, |
|
"loss": 0.9694, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 0.8357847378439176, |
|
"grad_norm": 0.42135560512542725, |
|
"learning_rate": 1.598466542842253e-05, |
|
"loss": 0.9399, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.8366499394358886, |
|
"grad_norm": 0.4253736734390259, |
|
"learning_rate": 1.5821249638564462e-05, |
|
"loss": 0.9661, |
|
"step": 4835 |
|
}, |
|
{ |
|
"epoch": 0.8375151410278595, |
|
"grad_norm": 0.4118526577949524, |
|
"learning_rate": 1.565860168421235e-05, |
|
"loss": 0.9096, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.8383803426198304, |
|
"grad_norm": 0.41610080003738403, |
|
"learning_rate": 1.549672304895764e-05, |
|
"loss": 0.9544, |
|
"step": 4845 |
|
}, |
|
{ |
|
"epoch": 0.8392455442118013, |
|
"grad_norm": 0.44422540068626404, |
|
"learning_rate": 1.5335615209374453e-05, |
|
"loss": 0.9124, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.8401107458037723, |
|
"grad_norm": 0.5014816522598267, |
|
"learning_rate": 1.51752796350061e-05, |
|
"loss": 0.977, |
|
"step": 4855 |
|
}, |
|
{ |
|
"epoch": 0.8409759473957432, |
|
"grad_norm": 0.44182294607162476, |
|
"learning_rate": 1.501571778835168e-05, |
|
"loss": 0.9411, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.8418411489877141, |
|
"grad_norm": 0.4564247131347656, |
|
"learning_rate": 1.4856931124852746e-05, |
|
"loss": 0.9687, |
|
"step": 4865 |
|
}, |
|
{ |
|
"epoch": 0.842706350579685, |
|
"grad_norm": 0.4558230936527252, |
|
"learning_rate": 1.4698921092880036e-05, |
|
"loss": 0.9988, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.843571552171656, |
|
"grad_norm": 0.4334022104740143, |
|
"learning_rate": 1.4541689133720238e-05, |
|
"loss": 0.9319, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 0.8444367537636269, |
|
"grad_norm": 0.45239973068237305, |
|
"learning_rate": 1.4385236681562863e-05, |
|
"loss": 0.9476, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.8453019553555978, |
|
"grad_norm": 0.4498237073421478, |
|
"learning_rate": 1.4229565163487157e-05, |
|
"loss": 0.958, |
|
"step": 4885 |
|
}, |
|
{ |
|
"epoch": 0.8461671569475688, |
|
"grad_norm": 0.47215980291366577, |
|
"learning_rate": 1.4074675999449093e-05, |
|
"loss": 0.9464, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.8470323585395397, |
|
"grad_norm": 0.48363256454467773, |
|
"learning_rate": 1.3920570602268413e-05, |
|
"loss": 0.9376, |
|
"step": 4895 |
|
}, |
|
{ |
|
"epoch": 0.8478975601315106, |
|
"grad_norm": 0.44979047775268555, |
|
"learning_rate": 1.3767250377615703e-05, |
|
"loss": 0.9764, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.8487627617234815, |
|
"grad_norm": 0.4314928352832794, |
|
"learning_rate": 1.3614716723999644e-05, |
|
"loss": 0.9463, |
|
"step": 4905 |
|
}, |
|
{ |
|
"epoch": 0.8496279633154525, |
|
"grad_norm": 0.44183149933815, |
|
"learning_rate": 1.3462971032754246e-05, |
|
"loss": 0.9331, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.8504931649074234, |
|
"grad_norm": 0.42287370562553406, |
|
"learning_rate": 1.331201468802602e-05, |
|
"loss": 0.9585, |
|
"step": 4915 |
|
}, |
|
{ |
|
"epoch": 0.8513583664993943, |
|
"grad_norm": 0.4583037197589874, |
|
"learning_rate": 1.3161849066761611e-05, |
|
"loss": 0.93, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.8522235680913652, |
|
"grad_norm": 0.3929138779640198, |
|
"learning_rate": 1.3012475538694934e-05, |
|
"loss": 0.9642, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 0.8530887696833362, |
|
"grad_norm": 0.44397783279418945, |
|
"learning_rate": 1.2863895466334996e-05, |
|
"loss": 0.996, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.8539539712753071, |
|
"grad_norm": 0.446635365486145, |
|
"learning_rate": 1.2716110204953124e-05, |
|
"loss": 0.9543, |
|
"step": 4935 |
|
}, |
|
{ |
|
"epoch": 0.854819172867278, |
|
"grad_norm": 0.4561355710029602, |
|
"learning_rate": 1.2569121102570969e-05, |
|
"loss": 0.9529, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.855684374459249, |
|
"grad_norm": 0.4427773654460907, |
|
"learning_rate": 1.2422929499947933e-05, |
|
"loss": 0.9941, |
|
"step": 4945 |
|
}, |
|
{ |
|
"epoch": 0.85654957605122, |
|
"grad_norm": 0.4415104389190674, |
|
"learning_rate": 1.2277536730568972e-05, |
|
"loss": 0.9566, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.8574147776431909, |
|
"grad_norm": 0.45385250449180603, |
|
"learning_rate": 1.2132944120632661e-05, |
|
"loss": 0.9628, |
|
"step": 4955 |
|
}, |
|
{ |
|
"epoch": 0.8582799792351617, |
|
"grad_norm": 0.42043399810791016, |
|
"learning_rate": 1.1989152989038743e-05, |
|
"loss": 0.997, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.8591451808271328, |
|
"grad_norm": 0.4300490617752075, |
|
"learning_rate": 1.1846164647376457e-05, |
|
"loss": 0.9709, |
|
"step": 4965 |
|
}, |
|
{ |
|
"epoch": 0.8600103824191037, |
|
"grad_norm": 0.41449466347694397, |
|
"learning_rate": 1.170398039991224e-05, |
|
"loss": 0.972, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.8608755840110746, |
|
"grad_norm": 0.39637109637260437, |
|
"learning_rate": 1.1562601543578167e-05, |
|
"loss": 0.9513, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 0.8617407856030455, |
|
"grad_norm": 0.43413782119750977, |
|
"learning_rate": 1.1422029367959753e-05, |
|
"loss": 0.9851, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.8626059871950165, |
|
"grad_norm": 0.4083414375782013, |
|
"learning_rate": 1.1282265155284566e-05, |
|
"loss": 0.9846, |
|
"step": 4985 |
|
}, |
|
{ |
|
"epoch": 0.8634711887869874, |
|
"grad_norm": 0.4629712700843811, |
|
"learning_rate": 1.1143310180410238e-05, |
|
"loss": 0.9431, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.8643363903789583, |
|
"grad_norm": 0.43061643838882446, |
|
"learning_rate": 1.1005165710812982e-05, |
|
"loss": 0.9519, |
|
"step": 4995 |
|
}, |
|
{ |
|
"epoch": 0.8652015919709293, |
|
"grad_norm": 0.4178582429885864, |
|
"learning_rate": 1.086783300657599e-05, |
|
"loss": 0.9638, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.8660667935629002, |
|
"grad_norm": 0.41832438111305237, |
|
"learning_rate": 1.0731313320377945e-05, |
|
"loss": 0.9772, |
|
"step": 5005 |
|
}, |
|
{ |
|
"epoch": 0.8669319951548711, |
|
"grad_norm": 0.42187732458114624, |
|
"learning_rate": 1.059560789748163e-05, |
|
"loss": 0.9524, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.867797196746842, |
|
"grad_norm": 0.44064244627952576, |
|
"learning_rate": 1.0460717975722446e-05, |
|
"loss": 0.9601, |
|
"step": 5015 |
|
}, |
|
{ |
|
"epoch": 0.868662398338813, |
|
"grad_norm": 0.4325951635837555, |
|
"learning_rate": 1.0326644785497309e-05, |
|
"loss": 0.976, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.8695275999307839, |
|
"grad_norm": 0.42271390557289124, |
|
"learning_rate": 1.0193389549753275e-05, |
|
"loss": 0.9519, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 0.8703928015227548, |
|
"grad_norm": 0.4525720179080963, |
|
"learning_rate": 1.0060953483976454e-05, |
|
"loss": 0.9833, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.8712580031147257, |
|
"grad_norm": 0.44258391857147217, |
|
"learning_rate": 9.929337796180904e-06, |
|
"loss": 0.984, |
|
"step": 5035 |
|
}, |
|
{ |
|
"epoch": 0.8721232047066967, |
|
"grad_norm": 0.4149342477321625, |
|
"learning_rate": 9.798543686897588e-06, |
|
"loss": 0.9293, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.8729884062986676, |
|
"grad_norm": 0.41939181089401245, |
|
"learning_rate": 9.668572349163507e-06, |
|
"loss": 0.979, |
|
"step": 5045 |
|
}, |
|
{ |
|
"epoch": 0.8738536078906385, |
|
"grad_norm": 0.44124025106430054, |
|
"learning_rate": 9.539424968510702e-06, |
|
"loss": 0.9822, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.8747188094826095, |
|
"grad_norm": 0.4244748651981354, |
|
"learning_rate": 9.411102722955534e-06, |
|
"loss": 1.0002, |
|
"step": 5055 |
|
}, |
|
{ |
|
"epoch": 0.8755840110745804, |
|
"grad_norm": 0.43866798281669617, |
|
"learning_rate": 9.283606782987887e-06, |
|
"loss": 0.9615, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.8764492126665513, |
|
"grad_norm": 0.454874724149704, |
|
"learning_rate": 9.156938311560525e-06, |
|
"loss": 0.946, |
|
"step": 5065 |
|
}, |
|
{ |
|
"epoch": 0.8773144142585222, |
|
"grad_norm": 0.44537651538848877, |
|
"learning_rate": 9.031098464078457e-06, |
|
"loss": 0.9726, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.8781796158504932, |
|
"grad_norm": 0.44006219506263733, |
|
"learning_rate": 8.906088388388411e-06, |
|
"loss": 0.9902, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 0.8790448174424641, |
|
"grad_norm": 0.4104478061199188, |
|
"learning_rate": 8.781909224768337e-06, |
|
"loss": 0.9935, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.879910019034435, |
|
"grad_norm": 0.415463387966156, |
|
"learning_rate": 8.658562105917079e-06, |
|
"loss": 0.9851, |
|
"step": 5085 |
|
}, |
|
{ |
|
"epoch": 0.880775220626406, |
|
"grad_norm": 0.4624484181404114, |
|
"learning_rate": 8.536048156943932e-06, |
|
"loss": 0.9356, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.8816404222183769, |
|
"grad_norm": 0.4278486669063568, |
|
"learning_rate": 8.414368495358538e-06, |
|
"loss": 0.9641, |
|
"step": 5095 |
|
}, |
|
{ |
|
"epoch": 0.8825056238103478, |
|
"grad_norm": 0.4188804030418396, |
|
"learning_rate": 8.293524231060468e-06, |
|
"loss": 0.9955, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.8833708254023187, |
|
"grad_norm": 0.4604167342185974, |
|
"learning_rate": 8.173516466329345e-06, |
|
"loss": 0.984, |
|
"step": 5105 |
|
}, |
|
{ |
|
"epoch": 0.8842360269942897, |
|
"grad_norm": 0.40931734442710876, |
|
"learning_rate": 8.054346295814564e-06, |
|
"loss": 0.9508, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.8851012285862606, |
|
"grad_norm": 0.4736385643482208, |
|
"learning_rate": 7.936014806525538e-06, |
|
"loss": 0.9467, |
|
"step": 5115 |
|
}, |
|
{ |
|
"epoch": 0.8859664301782315, |
|
"grad_norm": 0.42703744769096375, |
|
"learning_rate": 7.818523077821527e-06, |
|
"loss": 0.9343, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.8868316317702024, |
|
"grad_norm": 0.4844820499420166, |
|
"learning_rate": 7.701872181401992e-06, |
|
"loss": 1.0121, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 0.8876968333621734, |
|
"grad_norm": 0.42825546860694885, |
|
"learning_rate": 7.586063181296799e-06, |
|
"loss": 0.9751, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.8885620349541443, |
|
"grad_norm": 0.41822490096092224, |
|
"learning_rate": 7.471097133856353e-06, |
|
"loss": 0.9261, |
|
"step": 5135 |
|
}, |
|
{ |
|
"epoch": 0.8894272365461152, |
|
"grad_norm": 0.47537797689437866, |
|
"learning_rate": 7.356975087742202e-06, |
|
"loss": 0.9766, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.8902924381380862, |
|
"grad_norm": 0.45567959547042847, |
|
"learning_rate": 7.243698083917194e-06, |
|
"loss": 0.9857, |
|
"step": 5145 |
|
}, |
|
{ |
|
"epoch": 0.8911576397300571, |
|
"grad_norm": 0.43980252742767334, |
|
"learning_rate": 7.131267155636323e-06, |
|
"loss": 0.9359, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.892022841322028, |
|
"grad_norm": 0.4581001400947571, |
|
"learning_rate": 7.01968332843691e-06, |
|
"loss": 0.9722, |
|
"step": 5155 |
|
}, |
|
{ |
|
"epoch": 0.8928880429139989, |
|
"grad_norm": 0.4442357122898102, |
|
"learning_rate": 6.908947620129624e-06, |
|
"loss": 1.0104, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.8937532445059699, |
|
"grad_norm": 0.43554165959358215, |
|
"learning_rate": 6.799061040788934e-06, |
|
"loss": 0.9082, |
|
"step": 5165 |
|
}, |
|
{ |
|
"epoch": 0.8946184460979408, |
|
"grad_norm": 0.6001029014587402, |
|
"learning_rate": 6.690024592744027e-06, |
|
"loss": 0.9798, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.8954836476899117, |
|
"grad_norm": 0.44273775815963745, |
|
"learning_rate": 6.581839270569601e-06, |
|
"loss": 0.9635, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 0.8963488492818826, |
|
"grad_norm": 0.4063413143157959, |
|
"learning_rate": 6.474506061076824e-06, |
|
"loss": 0.9386, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.8972140508738536, |
|
"grad_norm": 0.43752190470695496, |
|
"learning_rate": 6.368025943304367e-06, |
|
"loss": 0.9594, |
|
"step": 5185 |
|
}, |
|
{ |
|
"epoch": 0.8980792524658245, |
|
"grad_norm": 0.42202919721603394, |
|
"learning_rate": 6.262399888509351e-06, |
|
"loss": 0.9876, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.8989444540577954, |
|
"grad_norm": 0.43374916911125183, |
|
"learning_rate": 6.157628860158671e-06, |
|
"loss": 0.9623, |
|
"step": 5195 |
|
}, |
|
{ |
|
"epoch": 0.8998096556497664, |
|
"grad_norm": 0.4456869661808014, |
|
"learning_rate": 6.053713813920036e-06, |
|
"loss": 0.9817, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.9006748572417373, |
|
"grad_norm": 0.4352042078971863, |
|
"learning_rate": 5.950655697653362e-06, |
|
"loss": 0.9868, |
|
"step": 5205 |
|
}, |
|
{ |
|
"epoch": 0.9015400588337082, |
|
"grad_norm": 0.4586144685745239, |
|
"learning_rate": 5.848455451402057e-06, |
|
"loss": 0.9918, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.9024052604256791, |
|
"grad_norm": 0.4337520897388458, |
|
"learning_rate": 5.747114007384513e-06, |
|
"loss": 0.9956, |
|
"step": 5215 |
|
}, |
|
{ |
|
"epoch": 0.9032704620176502, |
|
"grad_norm": 0.45494288206100464, |
|
"learning_rate": 5.646632289985543e-06, |
|
"loss": 0.9722, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.904135663609621, |
|
"grad_norm": 0.4270175099372864, |
|
"learning_rate": 5.547011215747977e-06, |
|
"loss": 0.93, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 0.905000865201592, |
|
"grad_norm": 0.4172538220882416, |
|
"learning_rate": 5.448251693364304e-06, |
|
"loss": 0.959, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.9058660667935629, |
|
"grad_norm": 0.4290398359298706, |
|
"learning_rate": 5.350354623668374e-06, |
|
"loss": 0.9706, |
|
"step": 5235 |
|
}, |
|
{ |
|
"epoch": 0.9067312683855339, |
|
"grad_norm": 0.4319567084312439, |
|
"learning_rate": 5.253320899627179e-06, |
|
"loss": 0.9898, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.9075964699775048, |
|
"grad_norm": 0.42448529601097107, |
|
"learning_rate": 5.157151406332739e-06, |
|
"loss": 0.9293, |
|
"step": 5245 |
|
}, |
|
{ |
|
"epoch": 0.9084616715694757, |
|
"grad_norm": 0.4146623909473419, |
|
"learning_rate": 5.0618470209939596e-06, |
|
"loss": 0.9676, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.9093268731614467, |
|
"grad_norm": 0.45237162709236145, |
|
"learning_rate": 4.9674086129287144e-06, |
|
"loss": 0.94, |
|
"step": 5255 |
|
}, |
|
{ |
|
"epoch": 0.9101920747534176, |
|
"grad_norm": 0.44053730368614197, |
|
"learning_rate": 4.8738370435558514e-06, |
|
"loss": 0.9613, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.9110572763453885, |
|
"grad_norm": 0.424537718296051, |
|
"learning_rate": 4.7811331663873745e-06, |
|
"loss": 0.9724, |
|
"step": 5265 |
|
}, |
|
{ |
|
"epoch": 0.9119224779373594, |
|
"grad_norm": 0.4328090250492096, |
|
"learning_rate": 4.689297827020634e-06, |
|
"loss": 0.9832, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.9127876795293304, |
|
"grad_norm": 0.4516524374485016, |
|
"learning_rate": 4.598331863130611e-06, |
|
"loss": 0.9805, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 0.9136528811213013, |
|
"grad_norm": 0.45256564021110535, |
|
"learning_rate": 4.508236104462338e-06, |
|
"loss": 0.9512, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.9145180827132722, |
|
"grad_norm": 0.44559070467948914, |
|
"learning_rate": 4.4190113728231895e-06, |
|
"loss": 0.9284, |
|
"step": 5285 |
|
}, |
|
{ |
|
"epoch": 0.9153832843052431, |
|
"grad_norm": 0.3941524624824524, |
|
"learning_rate": 4.330658482075589e-06, |
|
"loss": 0.9409, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.9162484858972141, |
|
"grad_norm": 0.4107651710510254, |
|
"learning_rate": 4.243178238129386e-06, |
|
"loss": 0.9803, |
|
"step": 5295 |
|
}, |
|
{ |
|
"epoch": 0.917113687489185, |
|
"grad_norm": 0.4321724772453308, |
|
"learning_rate": 4.15657143893462e-06, |
|
"loss": 0.9904, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.9179788890811559, |
|
"grad_norm": 0.49873462319374084, |
|
"learning_rate": 4.070838874474225e-06, |
|
"loss": 1.0426, |
|
"step": 5305 |
|
}, |
|
{ |
|
"epoch": 0.9188440906731269, |
|
"grad_norm": 0.4886455833911896, |
|
"learning_rate": 3.985981326756793e-06, |
|
"loss": 1.0217, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.9197092922650978, |
|
"grad_norm": 0.42019376158714294, |
|
"learning_rate": 3.901999569809501e-06, |
|
"loss": 0.982, |
|
"step": 5315 |
|
}, |
|
{ |
|
"epoch": 0.9205744938570687, |
|
"grad_norm": 0.4222065210342407, |
|
"learning_rate": 3.818894369670933e-06, |
|
"loss": 0.9921, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.9214396954490396, |
|
"grad_norm": 0.4316330850124359, |
|
"learning_rate": 3.736666484384288e-06, |
|
"loss": 1.0336, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 0.9223048970410106, |
|
"grad_norm": 0.41588303446769714, |
|
"learning_rate": 3.6553166639902048e-06, |
|
"loss": 0.9715, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.9231700986329815, |
|
"grad_norm": 0.4579836130142212, |
|
"learning_rate": 3.5748456505201975e-06, |
|
"loss": 0.9602, |
|
"step": 5335 |
|
}, |
|
{ |
|
"epoch": 0.9240353002249524, |
|
"grad_norm": 0.39906853437423706, |
|
"learning_rate": 3.4952541779896376e-06, |
|
"loss": 0.9555, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.9249005018169233, |
|
"grad_norm": 0.43084821105003357, |
|
"learning_rate": 3.4165429723912677e-06, |
|
"loss": 0.9961, |
|
"step": 5345 |
|
}, |
|
{ |
|
"epoch": 0.9257657034088943, |
|
"grad_norm": 0.4219900369644165, |
|
"learning_rate": 3.3387127516884443e-06, |
|
"loss": 0.9769, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.9266309050008652, |
|
"grad_norm": 0.4021409749984741, |
|
"learning_rate": 3.261764225808628e-06, |
|
"loss": 0.9609, |
|
"step": 5355 |
|
}, |
|
{ |
|
"epoch": 0.9274961065928361, |
|
"grad_norm": 0.4550817608833313, |
|
"learning_rate": 3.185698096636902e-06, |
|
"loss": 0.9429, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.9283613081848071, |
|
"grad_norm": 0.432493656873703, |
|
"learning_rate": 3.1105150580096108e-06, |
|
"loss": 0.9509, |
|
"step": 5365 |
|
}, |
|
{ |
|
"epoch": 0.929226509776778, |
|
"grad_norm": 0.43657249212265015, |
|
"learning_rate": 3.036215795707975e-06, |
|
"loss": 0.9454, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.9300917113687489, |
|
"grad_norm": 0.43158912658691406, |
|
"learning_rate": 2.9628009874518304e-06, |
|
"loss": 0.9408, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 0.9309569129607198, |
|
"grad_norm": 0.4231918156147003, |
|
"learning_rate": 2.8902713028935545e-06, |
|
"loss": 0.9776, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.9318221145526908, |
|
"grad_norm": 0.4418452978134155, |
|
"learning_rate": 2.818627403611818e-06, |
|
"loss": 0.9733, |
|
"step": 5385 |
|
}, |
|
{ |
|
"epoch": 0.9326873161446617, |
|
"grad_norm": 0.45113956928253174, |
|
"learning_rate": 2.7478699431056186e-06, |
|
"loss": 0.961, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.9335525177366326, |
|
"grad_norm": 0.42509666085243225, |
|
"learning_rate": 2.677999566788325e-06, |
|
"loss": 0.9885, |
|
"step": 5395 |
|
}, |
|
{ |
|
"epoch": 0.9344177193286035, |
|
"grad_norm": 0.42578408122062683, |
|
"learning_rate": 2.6090169119817764e-06, |
|
"loss": 0.9629, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.9352829209205745, |
|
"grad_norm": 0.5043439865112305, |
|
"learning_rate": 2.5409226079104563e-06, |
|
"loss": 0.9571, |
|
"step": 5405 |
|
}, |
|
{ |
|
"epoch": 0.9361481225125454, |
|
"grad_norm": 0.4643230438232422, |
|
"learning_rate": 2.473717275695797e-06, |
|
"loss": 0.9987, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.9370133241045163, |
|
"grad_norm": 0.4713646471500397, |
|
"learning_rate": 2.40740152835045e-06, |
|
"loss": 0.9663, |
|
"step": 5415 |
|
}, |
|
{ |
|
"epoch": 0.9378785256964873, |
|
"grad_norm": 0.5420528054237366, |
|
"learning_rate": 2.3419759707727584e-06, |
|
"loss": 0.957, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.9387437272884582, |
|
"grad_norm": 0.43974393606185913, |
|
"learning_rate": 2.2774411997411814e-06, |
|
"loss": 0.9602, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 0.9396089288804291, |
|
"grad_norm": 0.4206051528453827, |
|
"learning_rate": 2.21379780390889e-06, |
|
"loss": 0.9466, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.9404741304724, |
|
"grad_norm": 0.42323896288871765, |
|
"learning_rate": 2.1510463637984035e-06, |
|
"loss": 0.9683, |
|
"step": 5435 |
|
}, |
|
{ |
|
"epoch": 0.941339332064371, |
|
"grad_norm": 0.43212366104125977, |
|
"learning_rate": 2.0891874517962594e-06, |
|
"loss": 0.9524, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.9422045336563419, |
|
"grad_norm": 0.46724721789360046, |
|
"learning_rate": 2.0282216321478085e-06, |
|
"loss": 0.9736, |
|
"step": 5445 |
|
}, |
|
{ |
|
"epoch": 0.9430697352483128, |
|
"grad_norm": 0.44604164361953735, |
|
"learning_rate": 1.9681494609520736e-06, |
|
"loss": 0.9384, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.9439349368402837, |
|
"grad_norm": 0.4035724997520447, |
|
"learning_rate": 1.9089714861566964e-06, |
|
"loss": 1.0091, |
|
"step": 5455 |
|
}, |
|
{ |
|
"epoch": 0.9448001384322547, |
|
"grad_norm": 0.5425649285316467, |
|
"learning_rate": 1.8506882475528565e-06, |
|
"loss": 0.9617, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.9456653400242256, |
|
"grad_norm": 0.46280762553215027, |
|
"learning_rate": 1.7933002767704931e-06, |
|
"loss": 0.9462, |
|
"step": 5465 |
|
}, |
|
{ |
|
"epoch": 0.9465305416161965, |
|
"grad_norm": 0.45342132449150085, |
|
"learning_rate": 1.7368080972732792e-06, |
|
"loss": 0.9315, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.9473957432081676, |
|
"grad_norm": 0.43039941787719727, |
|
"learning_rate": 1.6812122243540119e-06, |
|
"loss": 1.0075, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 0.9482609448001385, |
|
"grad_norm": 0.4240161180496216, |
|
"learning_rate": 1.626513165129795e-06, |
|
"loss": 0.9827, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.9491261463921093, |
|
"grad_norm": 0.4125286340713501, |
|
"learning_rate": 1.5727114185374758e-06, |
|
"loss": 0.9314, |
|
"step": 5485 |
|
}, |
|
{ |
|
"epoch": 0.9499913479840802, |
|
"grad_norm": 0.4606507122516632, |
|
"learning_rate": 1.5198074753290714e-06, |
|
"loss": 0.9825, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.9508565495760513, |
|
"grad_norm": 0.408927857875824, |
|
"learning_rate": 1.4678018180672715e-06, |
|
"loss": 0.9769, |
|
"step": 5495 |
|
}, |
|
{ |
|
"epoch": 0.9517217511680222, |
|
"grad_norm": 0.4514823257923126, |
|
"learning_rate": 1.41669492112112e-06, |
|
"loss": 0.9654, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.952586952759993, |
|
"grad_norm": 0.46445760130882263, |
|
"learning_rate": 1.3664872506615523e-06, |
|
"loss": 0.9393, |
|
"step": 5505 |
|
}, |
|
{ |
|
"epoch": 0.953452154351964, |
|
"grad_norm": 0.4294278621673584, |
|
"learning_rate": 1.3171792646572978e-06, |
|
"loss": 0.9528, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.954317355943935, |
|
"grad_norm": 0.3879070580005646, |
|
"learning_rate": 1.2687714128705397e-06, |
|
"loss": 0.8864, |
|
"step": 5515 |
|
}, |
|
{ |
|
"epoch": 0.9551825575359059, |
|
"grad_norm": 0.4252847135066986, |
|
"learning_rate": 1.2212641368529842e-06, |
|
"loss": 0.9317, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.9560477591278768, |
|
"grad_norm": 0.4258163273334503, |
|
"learning_rate": 1.1746578699416754e-06, |
|
"loss": 0.9282, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 0.9569129607198478, |
|
"grad_norm": 0.44812753796577454, |
|
"learning_rate": 1.128953037255165e-06, |
|
"loss": 0.9677, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.9577781623118187, |
|
"grad_norm": 0.40613648295402527, |
|
"learning_rate": 1.0841500556895478e-06, |
|
"loss": 0.9674, |
|
"step": 5535 |
|
}, |
|
{ |
|
"epoch": 0.9586433639037896, |
|
"grad_norm": 0.4718643128871918, |
|
"learning_rate": 1.0402493339147112e-06, |
|
"loss": 0.9724, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.9595085654957605, |
|
"grad_norm": 0.4308999180793762, |
|
"learning_rate": 9.972512723705807e-07, |
|
"loss": 0.9625, |
|
"step": 5545 |
|
}, |
|
{ |
|
"epoch": 0.9603737670877315, |
|
"grad_norm": 0.4665636420249939, |
|
"learning_rate": 9.551562632634792e-07, |
|
"loss": 0.9491, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.9612389686797024, |
|
"grad_norm": 0.4143814742565155, |
|
"learning_rate": 9.13964690562552e-07, |
|
"loss": 0.9403, |
|
"step": 5555 |
|
}, |
|
{ |
|
"epoch": 0.9621041702716733, |
|
"grad_norm": 0.4198594391345978, |
|
"learning_rate": 8.736769299962588e-07, |
|
"loss": 0.9819, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.9629693718636442, |
|
"grad_norm": 0.42377784848213196, |
|
"learning_rate": 8.342933490489535e-07, |
|
"loss": 0.931, |
|
"step": 5565 |
|
}, |
|
{ |
|
"epoch": 0.9638345734556152, |
|
"grad_norm": 0.4581563472747803, |
|
"learning_rate": 7.958143069575097e-07, |
|
"loss": 0.9698, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.9646997750475861, |
|
"grad_norm": 0.4543211758136749, |
|
"learning_rate": 7.582401547080564e-07, |
|
"loss": 0.954, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 0.965564976639557, |
|
"grad_norm": 0.4556075930595398, |
|
"learning_rate": 7.215712350328141e-07, |
|
"loss": 0.9849, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.966430178231528, |
|
"grad_norm": 0.43737539649009705, |
|
"learning_rate": 6.85807882406908e-07, |
|
"loss": 0.9737, |
|
"step": 5585 |
|
}, |
|
{ |
|
"epoch": 0.9672953798234989, |
|
"grad_norm": 0.46313777565956116, |
|
"learning_rate": 6.509504230453378e-07, |
|
"loss": 0.9459, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.9681605814154698, |
|
"grad_norm": 0.45123517513275146, |
|
"learning_rate": 6.169991749000237e-07, |
|
"loss": 0.9601, |
|
"step": 5595 |
|
}, |
|
{ |
|
"epoch": 0.9690257830074407, |
|
"grad_norm": 0.4670114517211914, |
|
"learning_rate": 5.839544476568981e-07, |
|
"loss": 0.9651, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.9698909845994117, |
|
"grad_norm": 0.44797414541244507, |
|
"learning_rate": 5.518165427330413e-07, |
|
"loss": 0.9654, |
|
"step": 5605 |
|
}, |
|
{ |
|
"epoch": 0.9707561861913826, |
|
"grad_norm": 0.4370104968547821, |
|
"learning_rate": 5.205857532740054e-07, |
|
"loss": 0.9191, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.9716213877833535, |
|
"grad_norm": 0.4665054678916931, |
|
"learning_rate": 4.902623641510617e-07, |
|
"loss": 0.9943, |
|
"step": 5615 |
|
}, |
|
{ |
|
"epoch": 0.9724865893753245, |
|
"grad_norm": 0.41058677434921265, |
|
"learning_rate": 4.608466519586574e-07, |
|
"loss": 0.9637, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.9733517909672954, |
|
"grad_norm": 0.4567413926124573, |
|
"learning_rate": 4.323388850118848e-07, |
|
"loss": 0.9586, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 0.9742169925592663, |
|
"grad_norm": 0.3996705710887909, |
|
"learning_rate": 4.0473932334401665e-07, |
|
"loss": 0.9494, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.9750821941512372, |
|
"grad_norm": 0.4506674110889435, |
|
"learning_rate": 3.780482187041079e-07, |
|
"loss": 0.9389, |
|
"step": 5635 |
|
}, |
|
{ |
|
"epoch": 0.9759473957432082, |
|
"grad_norm": 0.4265097677707672, |
|
"learning_rate": 3.5226581455480857e-07, |
|
"loss": 1.0264, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.9768125973351791, |
|
"grad_norm": 0.44127118587493896, |
|
"learning_rate": 3.273923460699879e-07, |
|
"loss": 0.9439, |
|
"step": 5645 |
|
}, |
|
{ |
|
"epoch": 0.97767779892715, |
|
"grad_norm": 0.4313807785511017, |
|
"learning_rate": 3.0342804013271386e-07, |
|
"loss": 0.924, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.9785430005191209, |
|
"grad_norm": 0.4515504539012909, |
|
"learning_rate": 2.8037311533312126e-07, |
|
"loss": 0.944, |
|
"step": 5655 |
|
}, |
|
{ |
|
"epoch": 0.9794082021110919, |
|
"grad_norm": 0.4783179461956024, |
|
"learning_rate": 2.5822778196645803e-07, |
|
"loss": 0.9624, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.9802734037030628, |
|
"grad_norm": 0.43046215176582336, |
|
"learning_rate": 2.3699224203111992e-07, |
|
"loss": 0.9754, |
|
"step": 5665 |
|
}, |
|
{ |
|
"epoch": 0.9811386052950337, |
|
"grad_norm": 0.4199523627758026, |
|
"learning_rate": 2.1666668922682987e-07, |
|
"loss": 0.9489, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.9820038068870047, |
|
"grad_norm": 0.4476526379585266, |
|
"learning_rate": 1.97251308952906e-07, |
|
"loss": 0.951, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 0.9828690084789756, |
|
"grad_norm": 0.43268635869026184, |
|
"learning_rate": 1.7874627830650748e-07, |
|
"loss": 0.9758, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.9837342100709465, |
|
"grad_norm": 0.4126809537410736, |
|
"learning_rate": 1.6115176608104688e-07, |
|
"loss": 0.94, |
|
"step": 5685 |
|
}, |
|
{ |
|
"epoch": 0.9845994116629174, |
|
"grad_norm": 0.4443066120147705, |
|
"learning_rate": 1.4446793276468029e-07, |
|
"loss": 0.9786, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.9854646132548884, |
|
"grad_norm": 0.43723779916763306, |
|
"learning_rate": 1.2869493053880855e-07, |
|
"loss": 0.9376, |
|
"step": 5695 |
|
}, |
|
{ |
|
"epoch": 0.9863298148468593, |
|
"grad_norm": 0.4405401051044464, |
|
"learning_rate": 1.1383290327666718e-07, |
|
"loss": 0.9677, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.9871950164388302, |
|
"grad_norm": 0.4371713399887085, |
|
"learning_rate": 9.988198654209413e-08, |
|
"loss": 0.9591, |
|
"step": 5705 |
|
}, |
|
{ |
|
"epoch": 0.9880602180308011, |
|
"grad_norm": 0.4550669491291046, |
|
"learning_rate": 8.684230758818634e-08, |
|
"loss": 1.0056, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.9889254196227721, |
|
"grad_norm": 0.42430931329727173, |
|
"learning_rate": 7.471398535625618e-08, |
|
"loss": 0.9978, |
|
"step": 5715 |
|
}, |
|
{ |
|
"epoch": 0.989790621214743, |
|
"grad_norm": 0.4184456467628479, |
|
"learning_rate": 6.349713047464345e-08, |
|
"loss": 0.9465, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.9906558228067139, |
|
"grad_norm": 0.43265965580940247, |
|
"learning_rate": 5.319184525774956e-08, |
|
"loss": 1.0002, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 0.991521024398685, |
|
"grad_norm": 0.47475698590278625, |
|
"learning_rate": 4.37982237051271e-08, |
|
"loss": 0.9465, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.9923862259906558, |
|
"grad_norm": 0.4656958281993866, |
|
"learning_rate": 3.531635150059165e-08, |
|
"loss": 0.9793, |
|
"step": 5735 |
|
}, |
|
{ |
|
"epoch": 0.9932514275826267, |
|
"grad_norm": 0.4549233913421631, |
|
"learning_rate": 2.7746306011433576e-08, |
|
"loss": 0.9444, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.9941166291745976, |
|
"grad_norm": 0.4547722041606903, |
|
"learning_rate": 2.1088156287729644e-08, |
|
"loss": 0.9762, |
|
"step": 5745 |
|
}, |
|
{ |
|
"epoch": 0.9949818307665687, |
|
"grad_norm": 0.4455709457397461, |
|
"learning_rate": 1.5341963061743515e-08, |
|
"loss": 0.9767, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.9958470323585396, |
|
"grad_norm": 0.43941980600357056, |
|
"learning_rate": 1.0507778747281815e-08, |
|
"loss": 0.9707, |
|
"step": 5755 |
|
}, |
|
{ |
|
"epoch": 0.9967122339505105, |
|
"grad_norm": 0.46377772092819214, |
|
"learning_rate": 6.5856474393166666e-09, |
|
"loss": 0.9928, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.9975774355424813, |
|
"grad_norm": 0.4075023829936981, |
|
"learning_rate": 3.5756049135304835e-09, |
|
"loss": 0.9541, |
|
"step": 5765 |
|
}, |
|
{ |
|
"epoch": 0.9984426371344524, |
|
"grad_norm": 0.4415639638900757, |
|
"learning_rate": 1.4776786259718123e-09, |
|
"loss": 0.9693, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.9993078387264233, |
|
"grad_norm": 0.43806275725364685, |
|
"learning_rate": 2.9188771285548754e-10, |
|
"loss": 0.8886, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 5779, |
|
"total_flos": 1.1487027058043781e+19, |
|
"train_loss": 0.0, |
|
"train_runtime": 0.0143, |
|
"train_samples_per_second": 9715326.166, |
|
"train_steps_per_second": 404831.526 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 5779, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.1487027058043781e+19, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |