{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1090,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009174311926605505,
      "grad_norm": 9.797866581519601,
      "learning_rate": 1.8348623853211012e-07,
      "loss": 1.146,
      "step": 1
    },
    {
      "epoch": 0.0045871559633027525,
      "grad_norm": 8.427718555496059,
      "learning_rate": 9.174311926605506e-07,
      "loss": 1.1407,
      "step": 5
    },
    {
      "epoch": 0.009174311926605505,
      "grad_norm": 4.092928127427638,
      "learning_rate": 1.8348623853211011e-06,
      "loss": 1.052,
      "step": 10
    },
    {
      "epoch": 0.013761467889908258,
      "grad_norm": 2.5288720687942816,
      "learning_rate": 2.7522935779816517e-06,
      "loss": 1.0303,
      "step": 15
    },
    {
      "epoch": 0.01834862385321101,
      "grad_norm": 2.2519839990179045,
      "learning_rate": 3.6697247706422022e-06,
      "loss": 0.9989,
      "step": 20
    },
    {
      "epoch": 0.022935779816513763,
      "grad_norm": 2.1835658473984263,
      "learning_rate": 4.587155963302753e-06,
      "loss": 0.998,
      "step": 25
    },
    {
      "epoch": 0.027522935779816515,
      "grad_norm": 2.287263603922945,
      "learning_rate": 5.504587155963303e-06,
      "loss": 0.9951,
      "step": 30
    },
    {
      "epoch": 0.03211009174311927,
      "grad_norm": 2.434127924697329,
      "learning_rate": 6.422018348623854e-06,
      "loss": 0.9918,
      "step": 35
    },
    {
      "epoch": 0.03669724770642202,
      "grad_norm": 1.8567417457183697,
      "learning_rate": 7.3394495412844045e-06,
      "loss": 0.9979,
      "step": 40
    },
    {
      "epoch": 0.04128440366972477,
      "grad_norm": 2.4003193073390423,
      "learning_rate": 8.256880733944956e-06,
      "loss": 0.9819,
      "step": 45
    },
    {
      "epoch": 0.045871559633027525,
      "grad_norm": 2.0201383685594076,
      "learning_rate": 9.174311926605506e-06,
      "loss": 0.9879,
      "step": 50
    },
    {
      "epoch": 0.05045871559633028,
      "grad_norm": 2.211858577649561,
      "learning_rate": 1.0091743119266055e-05,
      "loss": 0.9976,
      "step": 55
    },
    {
      "epoch": 0.05504587155963303,
      "grad_norm": 1.943856442458377,
      "learning_rate": 1.1009174311926607e-05,
      "loss": 1.0015,
      "step": 60
    },
    {
      "epoch": 0.05963302752293578,
      "grad_norm": 2.158310323614526,
      "learning_rate": 1.1926605504587156e-05,
      "loss": 0.9854,
      "step": 65
    },
    {
      "epoch": 0.06422018348623854,
      "grad_norm": 2.808086678430107,
      "learning_rate": 1.2844036697247708e-05,
      "loss": 0.99,
      "step": 70
    },
    {
      "epoch": 0.06880733944954129,
      "grad_norm": 2.9717767512687336,
      "learning_rate": 1.3761467889908258e-05,
      "loss": 1.0043,
      "step": 75
    },
    {
      "epoch": 0.07339449541284404,
      "grad_norm": 2.394726897034978,
      "learning_rate": 1.4678899082568809e-05,
      "loss": 0.9934,
      "step": 80
    },
    {
      "epoch": 0.0779816513761468,
      "grad_norm": 2.7045539878487044,
      "learning_rate": 1.559633027522936e-05,
      "loss": 0.9956,
      "step": 85
    },
    {
      "epoch": 0.08256880733944955,
      "grad_norm": 2.770778069055874,
      "learning_rate": 1.6513761467889912e-05,
      "loss": 1.0155,
      "step": 90
    },
    {
      "epoch": 0.0871559633027523,
      "grad_norm": 2.3762283504209054,
      "learning_rate": 1.743119266055046e-05,
      "loss": 1.0242,
      "step": 95
    },
    {
      "epoch": 0.09174311926605505,
      "grad_norm": 2.509031406395702,
      "learning_rate": 1.834862385321101e-05,
      "loss": 1.0002,
      "step": 100
    },
    {
      "epoch": 0.0963302752293578,
      "grad_norm": 2.1997287382869333,
      "learning_rate": 1.9266055045871563e-05,
      "loss": 1.0169,
      "step": 105
    },
    {
      "epoch": 0.10091743119266056,
      "grad_norm": 2.1841582267708333,
      "learning_rate": 1.999994872196626e-05,
      "loss": 1.026,
      "step": 110
    },
    {
      "epoch": 0.10550458715596331,
      "grad_norm": 2.936698161408905,
      "learning_rate": 1.9998154046002822e-05,
      "loss": 1.0091,
      "step": 115
    },
    {
      "epoch": 0.11009174311926606,
      "grad_norm": 2.3526922344177423,
      "learning_rate": 1.999379599421534e-05,
      "loss": 1.0111,
      "step": 120
    },
    {
      "epoch": 0.11467889908256881,
      "grad_norm": 2.0102554543255153,
      "learning_rate": 1.9986875683942535e-05,
      "loss": 1.0191,
      "step": 125
    },
    {
      "epoch": 0.11926605504587157,
      "grad_norm": 1.8656284139873642,
      "learning_rate": 1.9977394889447526e-05,
      "loss": 1.0158,
      "step": 130
    },
    {
      "epoch": 0.12385321100917432,
      "grad_norm": 1.9067666799593996,
      "learning_rate": 1.9965356041462954e-05,
      "loss": 0.996,
      "step": 135
    },
    {
      "epoch": 0.12844036697247707,
      "grad_norm": 2.09289543401076,
      "learning_rate": 1.9950762226567783e-05,
      "loss": 1.0223,
      "step": 140
    },
    {
      "epoch": 0.13302752293577982,
      "grad_norm": 2.163029935167911,
      "learning_rate": 1.9933617186395917e-05,
      "loss": 1.0284,
      "step": 145
    },
    {
      "epoch": 0.13761467889908258,
      "grad_norm": 2.1355703584941006,
      "learning_rate": 1.9913925316676946e-05,
      "loss": 1.0222,
      "step": 150
    },
    {
      "epoch": 0.14220183486238533,
      "grad_norm": 1.9312734976395127,
      "learning_rate": 1.9891691666109112e-05,
      "loss": 1.045,
      "step": 155
    },
    {
      "epoch": 0.14678899082568808,
      "grad_norm": 2.0041309459260814,
      "learning_rate": 1.9866921935064907e-05,
      "loss": 1.0342,
      "step": 160
    },
    {
      "epoch": 0.15137614678899083,
      "grad_norm": 2.062699433171624,
      "learning_rate": 1.9839622474129595e-05,
      "loss": 1.0446,
      "step": 165
    },
    {
      "epoch": 0.1559633027522936,
      "grad_norm": 1.794061296945893,
      "learning_rate": 1.9809800282473014e-05,
      "loss": 1.0325,
      "step": 170
    },
    {
      "epoch": 0.16055045871559634,
      "grad_norm": 1.7069314198979184,
      "learning_rate": 1.977746300605507e-05,
      "loss": 1.0405,
      "step": 175
    },
    {
      "epoch": 0.1651376146788991,
      "grad_norm": 1.8941763788054986,
      "learning_rate": 1.9742618935665478e-05,
      "loss": 1.0314,
      "step": 180
    },
    {
      "epoch": 0.16972477064220184,
      "grad_norm": 1.890460224553765,
      "learning_rate": 1.9705277004798072e-05,
      "loss": 1.0371,
      "step": 185
    },
    {
      "epoch": 0.1743119266055046,
      "grad_norm": 1.7702822374171037,
      "learning_rate": 1.9665446787360444e-05,
      "loss": 1.0422,
      "step": 190
    },
    {
      "epoch": 0.17889908256880735,
      "grad_norm": 2.292037944956991,
      "learning_rate": 1.9623138495219292e-05,
      "loss": 1.0258,
      "step": 195
    },
    {
      "epoch": 0.1834862385321101,
      "grad_norm": 1.9163146553783408,
      "learning_rate": 1.957836297558229e-05,
      "loss": 1.0284,
      "step": 200
    },
    {
      "epoch": 0.18807339449541285,
      "grad_norm": 1.8935048040607476,
      "learning_rate": 1.9531131708217005e-05,
      "loss": 1.0282,
      "step": 205
    },
    {
      "epoch": 0.1926605504587156,
      "grad_norm": 1.7860045810763372,
      "learning_rate": 1.948145680250766e-05,
      "loss": 1.0227,
      "step": 210
    },
    {
      "epoch": 0.19724770642201836,
      "grad_norm": 1.8083970356723782,
      "learning_rate": 1.9429350994350483e-05,
      "loss": 1.0347,
      "step": 215
    },
    {
      "epoch": 0.2018348623853211,
      "grad_norm": 1.6934436772369132,
      "learning_rate": 1.93748276428884e-05,
      "loss": 1.0422,
      "step": 220
    },
    {
      "epoch": 0.20642201834862386,
      "grad_norm": 1.8100390462895424,
      "learning_rate": 1.931790072708596e-05,
      "loss": 1.0269,
      "step": 225
    },
    {
      "epoch": 0.21100917431192662,
      "grad_norm": 1.8394285930837164,
      "learning_rate": 1.9258584842145342e-05,
      "loss": 1.0472,
      "step": 230
    },
    {
      "epoch": 0.21559633027522937,
      "grad_norm": 2.0864796192720774,
      "learning_rate": 1.9196895195764363e-05,
      "loss": 1.028,
      "step": 235
    },
    {
      "epoch": 0.22018348623853212,
      "grad_norm": 1.9906142761148835,
      "learning_rate": 1.913284760423745e-05,
      "loss": 1.0321,
      "step": 240
    },
    {
      "epoch": 0.22477064220183487,
      "grad_norm": 1.8346043792060736,
      "learning_rate": 1.9066458488400586e-05,
      "loss": 1.031,
      "step": 245
    },
    {
      "epoch": 0.22935779816513763,
      "grad_norm": 1.75807456533747,
      "learning_rate": 1.8997744869421248e-05,
      "loss": 1.0258,
      "step": 250
    },
    {
      "epoch": 0.23394495412844038,
      "grad_norm": 1.7950761001144098,
      "learning_rate": 1.8926724364434447e-05,
      "loss": 1.0182,
      "step": 255
    },
    {
      "epoch": 0.23853211009174313,
      "grad_norm": 1.821365158517499,
      "learning_rate": 1.8853415182025953e-05,
      "loss": 1.032,
      "step": 260
    },
    {
      "epoch": 0.24311926605504589,
      "grad_norm": 1.7138176018422528,
      "learning_rate": 1.8777836117563894e-05,
      "loss": 1.0285,
      "step": 265
    },
    {
      "epoch": 0.24770642201834864,
      "grad_norm": 1.6119249956139892,
      "learning_rate": 1.8700006548379898e-05,
      "loss": 1.0226,
      "step": 270
    },
    {
      "epoch": 0.25229357798165136,
      "grad_norm": 1.6075787071095569,
      "learning_rate": 1.861994642880105e-05,
      "loss": 1.0441,
      "step": 275
    },
    {
      "epoch": 0.25688073394495414,
      "grad_norm": 1.6370412009626971,
      "learning_rate": 1.8537676285033886e-05,
      "loss": 1.0325,
      "step": 280
    },
    {
      "epoch": 0.26146788990825687,
      "grad_norm": 1.7230515975187888,
      "learning_rate": 1.845321720990181e-05,
      "loss": 1.0348,
      "step": 285
    },
    {
      "epoch": 0.26605504587155965,
      "grad_norm": 1.6235398322699879,
      "learning_rate": 1.8366590857437182e-05,
      "loss": 1.0373,
      "step": 290
    },
    {
      "epoch": 0.2706422018348624,
      "grad_norm": 1.8167408195301202,
      "learning_rate": 1.8277819437329577e-05,
      "loss": 1.0235,
      "step": 295
    },
    {
      "epoch": 0.27522935779816515,
      "grad_norm": 1.701844754548217,
      "learning_rate": 1.8186925709231534e-05,
      "loss": 1.0366,
      "step": 300
    },
    {
      "epoch": 0.2798165137614679,
      "grad_norm": 1.5919168264308288,
      "learning_rate": 1.809393297692334e-05,
      "loss": 1.0292,
      "step": 305
    },
    {
      "epoch": 0.28440366972477066,
      "grad_norm": 1.7948244166489418,
      "learning_rate": 1.799886508233829e-05,
      "loss": 1.0458,
      "step": 310
    },
    {
      "epoch": 0.2889908256880734,
      "grad_norm": 1.6845863944203259,
      "learning_rate": 1.790174639944997e-05,
      "loss": 1.0422,
      "step": 315
    },
    {
      "epoch": 0.29357798165137616,
      "grad_norm": 1.6627384237203684,
      "learning_rate": 1.780260182802314e-05,
      "loss": 1.0309,
      "step": 320
    },
    {
      "epoch": 0.2981651376146789,
      "grad_norm": 1.78048904429509,
      "learning_rate": 1.7701456787229805e-05,
      "loss": 1.0362,
      "step": 325
    },
    {
      "epoch": 0.30275229357798167,
      "grad_norm": 1.621206798299626,
      "learning_rate": 1.7598337209132142e-05,
      "loss": 1.0189,
      "step": 330
    },
    {
      "epoch": 0.3073394495412844,
      "grad_norm": 1.5421888344834969,
      "learning_rate": 1.7493269532033882e-05,
      "loss": 1.0199,
      "step": 335
    },
    {
      "epoch": 0.3119266055045872,
      "grad_norm": 1.528075273938717,
      "learning_rate": 1.738628069370195e-05,
      "loss": 1.0312,
      "step": 340
    },
    {
      "epoch": 0.3165137614678899,
      "grad_norm": 1.610547863200134,
      "learning_rate": 1.7277398124460022e-05,
      "loss": 1.0265,
      "step": 345
    },
    {
      "epoch": 0.3211009174311927,
      "grad_norm": 1.547322038003979,
      "learning_rate": 1.71666497401558e-05,
      "loss": 0.9964,
      "step": 350
    },
    {
      "epoch": 0.3256880733944954,
      "grad_norm": 1.622700336087546,
      "learning_rate": 1.7054063935003813e-05,
      "loss": 1.0288,
      "step": 355
    },
    {
      "epoch": 0.3302752293577982,
      "grad_norm": 1.7814950838467476,
      "learning_rate": 1.6939669574305565e-05,
      "loss": 1.0123,
      "step": 360
    },
    {
      "epoch": 0.3348623853211009,
      "grad_norm": 1.522218935182621,
      "learning_rate": 1.6823495987048922e-05,
      "loss": 1.0139,
      "step": 365
    },
    {
      "epoch": 0.3394495412844037,
      "grad_norm": 1.6729699264181774,
      "learning_rate": 1.6705572958388576e-05,
      "loss": 1.0168,
      "step": 370
    },
    {
      "epoch": 0.3440366972477064,
      "grad_norm": 1.6039605046472551,
      "learning_rate": 1.6585930722009602e-05,
      "loss": 1.0385,
      "step": 375
    },
    {
      "epoch": 0.3486238532110092,
      "grad_norm": 1.4788794149889521,
      "learning_rate": 1.6464599952375998e-05,
      "loss": 1.0258,
      "step": 380
    },
    {
      "epoch": 0.3532110091743119,
      "grad_norm": 1.504660109687917,
      "learning_rate": 1.63416117568662e-05,
      "loss": 1.0199,
      "step": 385
    },
    {
      "epoch": 0.3577981651376147,
      "grad_norm": 1.5030704545753268,
      "learning_rate": 1.621699766779763e-05,
      "loss": 1.0164,
      "step": 390
    },
    {
      "epoch": 0.3623853211009174,
      "grad_norm": 1.4694185354290437,
      "learning_rate": 1.6090789634342278e-05,
      "loss": 1.008,
      "step": 395
    },
    {
      "epoch": 0.3669724770642202,
      "grad_norm": 1.502605819170643,
      "learning_rate": 1.5963020014335437e-05,
      "loss": 1.014,
      "step": 400
    },
    {
      "epoch": 0.37155963302752293,
      "grad_norm": 1.5065560011505659,
      "learning_rate": 1.583372156597961e-05,
      "loss": 1.0068,
      "step": 405
    },
    {
      "epoch": 0.3761467889908257,
      "grad_norm": 1.4320112984208702,
      "learning_rate": 1.570292743944583e-05,
      "loss": 1.0058,
      "step": 410
    },
    {
      "epoch": 0.38073394495412843,
      "grad_norm": 1.4326401831034563,
      "learning_rate": 1.557067116837444e-05,
      "loss": 1.025,
      "step": 415
    },
    {
      "epoch": 0.3853211009174312,
      "grad_norm": 1.488582529852783,
      "learning_rate": 1.5436986661277578e-05,
      "loss": 1.0171,
      "step": 420
    },
    {
      "epoch": 0.38990825688073394,
      "grad_norm": 1.5853664334620217,
      "learning_rate": 1.530190819284555e-05,
      "loss": 1.0062,
      "step": 425
    },
    {
      "epoch": 0.3944954128440367,
      "grad_norm": 1.6376346358210006,
      "learning_rate": 1.5165470395159314e-05,
      "loss": 1.0362,
      "step": 430
    },
    {
      "epoch": 0.39908256880733944,
      "grad_norm": 2.107700540823469,
      "learning_rate": 1.5027708248811331e-05,
      "loss": 1.0036,
      "step": 435
    },
    {
      "epoch": 0.4036697247706422,
      "grad_norm": 1.482808306893593,
      "learning_rate": 1.4888657073937077e-05,
      "loss": 1.0125,
      "step": 440
    },
    {
      "epoch": 0.40825688073394495,
      "grad_norm": 1.5321553428199717,
      "learning_rate": 1.4748352521159492e-05,
      "loss": 0.9996,
      "step": 445
    },
    {
      "epoch": 0.41284403669724773,
      "grad_norm": 1.4182290148630876,
      "learning_rate": 1.4606830562448692e-05,
      "loss": 1.0078,
      "step": 450
    },
    {
      "epoch": 0.41743119266055045,
      "grad_norm": 1.652462912329951,
      "learning_rate": 1.4464127481899312e-05,
      "loss": 1.0095,
      "step": 455
    },
    {
      "epoch": 0.42201834862385323,
      "grad_norm": 1.4134871643935982,
      "learning_rate": 1.4320279866427798e-05,
      "loss": 1.0086,
      "step": 460
    },
    {
      "epoch": 0.42660550458715596,
      "grad_norm": 1.4681250077131003,
      "learning_rate": 1.4175324596392075e-05,
      "loss": 1.0228,
      "step": 465
    },
    {
      "epoch": 0.43119266055045874,
      "grad_norm": 1.5879317091831868,
      "learning_rate": 1.402929883613599e-05,
      "loss": 1.009,
      "step": 470
    },
    {
      "epoch": 0.43577981651376146,
      "grad_norm": 1.482581286022142,
      "learning_rate": 1.3882240024460928e-05,
      "loss": 0.9949,
      "step": 475
    },
    {
      "epoch": 0.44036697247706424,
      "grad_norm": 1.5572687872922886,
      "learning_rate": 1.3734185865027061e-05,
      "loss": 1.0132,
      "step": 480
    },
    {
      "epoch": 0.44495412844036697,
      "grad_norm": 1.4871057091418516,
      "learning_rate": 1.358517431668672e-05,
      "loss": 1.002,
      "step": 485
    },
    {
      "epoch": 0.44954128440366975,
      "grad_norm": 1.3354034524950988,
      "learning_rate": 1.3435243583752294e-05,
      "loss": 0.9966,
      "step": 490
    },
    {
      "epoch": 0.4541284403669725,
      "grad_norm": 1.411120877551773,
      "learning_rate": 1.3284432106201233e-05,
      "loss": 0.9905,
      "step": 495
    },
    {
      "epoch": 0.45871559633027525,
      "grad_norm": 1.5259817407747036,
      "learning_rate": 1.313277854982062e-05,
      "loss": 1.0106,
      "step": 500
    },
    {
      "epoch": 0.463302752293578,
      "grad_norm": 1.5742889406672467,
      "learning_rate": 1.2980321796293838e-05,
      "loss": 0.9891,
      "step": 505
    },
    {
      "epoch": 0.46788990825688076,
      "grad_norm": 1.4784810162072364,
      "learning_rate": 1.2827100933231904e-05,
      "loss": 1.0079,
      "step": 510
    },
    {
      "epoch": 0.4724770642201835,
      "grad_norm": 1.6554360721057533,
      "learning_rate": 1.2673155244151985e-05,
      "loss": 1.0075,
      "step": 515
    },
    {
      "epoch": 0.47706422018348627,
      "grad_norm": 1.53715721377512,
      "learning_rate": 1.2518524198405699e-05,
      "loss": 1.0017,
      "step": 520
    },
    {
      "epoch": 0.481651376146789,
      "grad_norm": 1.5071083010755015,
      "learning_rate": 1.2363247441059775e-05,
      "loss": 0.9877,
      "step": 525
    },
    {
      "epoch": 0.48623853211009177,
      "grad_norm": 1.4445010695511022,
      "learning_rate": 1.2207364782731657e-05,
      "loss": 1.0119,
      "step": 530
    },
    {
      "epoch": 0.4908256880733945,
      "grad_norm": 1.3964846678923748,
      "learning_rate": 1.2050916189382646e-05,
      "loss": 0.9755,
      "step": 535
    },
    {
      "epoch": 0.4954128440366973,
      "grad_norm": 1.5013913915153163,
      "learning_rate": 1.189394177207125e-05,
      "loss": 0.9836,
      "step": 540
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.3940120479721783,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.9906,
      "step": 545
    },
    {
      "epoch": 0.5045871559633027,
      "grad_norm": 1.4590101843259737,
      "learning_rate": 1.1578576573543541e-05,
      "loss": 0.9667,
      "step": 550
    },
    {
      "epoch": 0.5091743119266054,
      "grad_norm": 1.4275421198403777,
      "learning_rate": 1.1420266647205232e-05,
      "loss": 0.9775,
      "step": 555
    },
    {
      "epoch": 0.5137614678899083,
      "grad_norm": 1.4318930366924776,
      "learning_rate": 1.1261592585930576e-05,
      "loss": 0.987,
      "step": 560
    },
    {
      "epoch": 0.518348623853211,
      "grad_norm": 1.4029387505115745,
      "learning_rate": 1.1102595071354471e-05,
      "loss": 0.9877,
      "step": 565
    },
    {
      "epoch": 0.5229357798165137,
      "grad_norm": 1.40361379595912,
      "learning_rate": 1.0943314868040365e-05,
      "loss": 0.9809,
      "step": 570
    },
    {
      "epoch": 0.5275229357798165,
      "grad_norm": 1.620813299969718,
      "learning_rate": 1.0783792813028828e-05,
      "loss": 0.9916,
      "step": 575
    },
    {
      "epoch": 0.5321100917431193,
      "grad_norm": 1.4267199847240755,
      "learning_rate": 1.0624069805367558e-05,
      "loss": 0.9724,
      "step": 580
    },
    {
      "epoch": 0.536697247706422,
      "grad_norm": 1.3967830543656679,
      "learning_rate": 1.0464186795625481e-05,
      "loss": 0.9754,
      "step": 585
    },
    {
      "epoch": 0.5412844036697247,
      "grad_norm": 1.4999292927669836,
      "learning_rate": 1.0304184775393642e-05,
      "loss": 0.986,
      "step": 590
    },
    {
      "epoch": 0.5458715596330275,
      "grad_norm": 1.4423550886024152,
      "learning_rate": 1.0144104766775574e-05,
      "loss": 0.9714,
      "step": 595
    },
    {
      "epoch": 0.5504587155963303,
      "grad_norm": 1.3531728935425589,
      "learning_rate": 9.983987811869863e-06,
      "loss": 0.964,
      "step": 600
    },
    {
      "epoch": 0.555045871559633,
      "grad_norm": 1.3951653536410364,
      "learning_rate": 9.823874962247565e-06,
      "loss": 0.9889,
      "step": 605
    },
    {
      "epoch": 0.5596330275229358,
      "grad_norm": 1.4511174833378555,
      "learning_rate": 9.663807268427197e-06,
      "loss": 0.9523,
      "step": 610
    },
    {
      "epoch": 0.5642201834862385,
      "grad_norm": 1.3735460270489455,
      "learning_rate": 9.503825769350016e-06,
      "loss": 0.9784,
      "step": 615
    },
    {
      "epoch": 0.5688073394495413,
      "grad_norm": 1.4407979891800395,
      "learning_rate": 9.343971481858246e-06,
      "loss": 0.9751,
      "step": 620
    },
    {
      "epoch": 0.573394495412844,
      "grad_norm": 1.3764958465136419,
      "learning_rate": 9.184285390178978e-06,
      "loss": 0.971,
      "step": 625
    },
    {
      "epoch": 0.5779816513761468,
      "grad_norm": 1.3475749847045115,
      "learning_rate": 9.024808435416435e-06,
      "loss": 0.9588,
      "step": 630
    },
    {
      "epoch": 0.5825688073394495,
      "grad_norm": 1.4165511547354543,
      "learning_rate": 8.865581505055292e-06,
      "loss": 0.9678,
      "step": 635
    },
    {
      "epoch": 0.5871559633027523,
      "grad_norm": 1.409796656867394,
      "learning_rate": 8.706645422477739e-06,
      "loss": 0.9635,
      "step": 640
    },
    {
      "epoch": 0.591743119266055,
      "grad_norm": 1.4960028493594866,
      "learning_rate": 8.548040936496989e-06,
      "loss": 0.9686,
      "step": 645
    },
    {
      "epoch": 0.5963302752293578,
      "grad_norm": 1.4512436818877472,
      "learning_rate": 8.389808710909881e-06,
      "loss": 0.9715,
      "step": 650
    },
    {
      "epoch": 0.6009174311926605,
      "grad_norm": 1.389169191448506,
      "learning_rate": 8.231989314071318e-06,
      "loss": 0.97,
      "step": 655
    },
    {
      "epoch": 0.6055045871559633,
      "grad_norm": 1.4286996459347407,
      "learning_rate": 8.07462320849313e-06,
      "loss": 0.9861,
      "step": 660
    },
    {
      "epoch": 0.6100917431192661,
      "grad_norm": 1.389233177009358,
      "learning_rate": 7.917750740470116e-06,
      "loss": 0.9633,
      "step": 665
    },
    {
      "epoch": 0.6146788990825688,
      "grad_norm": 1.3775631463747944,
      "learning_rate": 7.761412129735853e-06,
      "loss": 0.9625,
      "step": 670
    },
    {
      "epoch": 0.6192660550458715,
      "grad_norm": 1.4009260092646365,
      "learning_rate": 7.605647459150961e-06,
      "loss": 0.9724,
      "step": 675
    },
    {
      "epoch": 0.6238532110091743,
      "grad_norm": 1.3225550273394528,
      "learning_rate": 7.4504966644264775e-06,
      "loss": 0.9526,
      "step": 680
    },
    {
      "epoch": 0.6284403669724771,
      "grad_norm": 1.3609358732641874,
      "learning_rate": 7.295999523884921e-06,
      "loss": 0.9631,
      "step": 685
    },
    {
      "epoch": 0.6330275229357798,
      "grad_norm": 1.3744909149495705,
      "learning_rate": 7.142195648261747e-06,
      "loss": 0.9736,
      "step": 690
    },
    {
      "epoch": 0.6376146788990825,
      "grad_norm": 1.344696610604751,
      "learning_rate": 6.989124470549746e-06,
      "loss": 0.9423,
      "step": 695
    },
    {
      "epoch": 0.6422018348623854,
      "grad_norm": 1.337942065126883,
      "learning_rate": 6.83682523588902e-06,
      "loss": 0.9655,
      "step": 700
    },
    {
      "epoch": 0.6467889908256881,
      "grad_norm": 1.3614847141575495,
      "learning_rate": 6.685336991505122e-06,
      "loss": 0.9678,
      "step": 705
    },
    {
      "epoch": 0.6513761467889908,
      "grad_norm": 1.3230900779999575,
      "learning_rate": 6.5346985766979384e-06,
      "loss": 0.9571,
      "step": 710
    },
    {
      "epoch": 0.6559633027522935,
      "grad_norm": 1.334659859344649,
      "learning_rate": 6.384948612883872e-06,
      "loss": 0.9612,
      "step": 715
    },
    {
      "epoch": 0.6605504587155964,
      "grad_norm": 1.30456120316146,
      "learning_rate": 6.2361254936939e-06,
      "loss": 0.9617,
      "step": 720
    },
    {
      "epoch": 0.6651376146788991,
      "grad_norm": 1.323565171442615,
      "learning_rate": 6.0882673751300235e-06,
      "loss": 0.9507,
      "step": 725
    },
    {
      "epoch": 0.6697247706422018,
      "grad_norm": 1.3345956617227264,
      "learning_rate": 5.941412165782645e-06,
      "loss": 0.9589,
      "step": 730
    },
    {
      "epoch": 0.6743119266055045,
      "grad_norm": 1.4304814389946796,
      "learning_rate": 5.79559751711138e-06,
      "loss": 0.9505,
      "step": 735
    },
    {
      "epoch": 0.6788990825688074,
      "grad_norm": 1.3356742272309972,
      "learning_rate": 5.650860813791786e-06,
      "loss": 0.9518,
      "step": 740
    },
    {
      "epoch": 0.6834862385321101,
      "grad_norm": 1.4025057276173587,
      "learning_rate": 5.507239164130501e-06,
      "loss": 0.9513,
      "step": 745
    },
    {
      "epoch": 0.6880733944954128,
      "grad_norm": 1.3358485503012785,
      "learning_rate": 5.364769390551225e-06,
      "loss": 0.9683,
      "step": 750
    },
    {
      "epoch": 0.6926605504587156,
      "grad_norm": 1.3082785185345842,
      "learning_rate": 5.223488020154028e-06,
      "loss": 0.9624,
      "step": 755
    },
    {
      "epoch": 0.6972477064220184,
      "grad_norm": 1.3522908131265046,
      "learning_rate": 5.083431275350312e-06,
      "loss": 0.9283,
      "step": 760
    },
    {
      "epoch": 0.7018348623853211,
      "grad_norm": 1.2922554744460317,
      "learning_rate": 4.9446350645759885e-06,
      "loss": 0.9441,
      "step": 765
    },
    {
      "epoch": 0.7064220183486238,
      "grad_norm": 1.4114956038550543,
      "learning_rate": 4.807134973085036e-06,
      "loss": 0.9697,
      "step": 770
    },
    {
      "epoch": 0.7110091743119266,
      "grad_norm": 1.2696644616371817,
      "learning_rate": 4.670966253826027e-06,
      "loss": 0.9373,
      "step": 775
    },
    {
      "epoch": 0.7155963302752294,
      "grad_norm": 1.4629620137851231,
      "learning_rate": 4.53616381840377e-06,
      "loss": 0.95,
      "step": 780
    },
    {
      "epoch": 0.7201834862385321,
      "grad_norm": 1.3714565256021574,
      "learning_rate": 4.402762228128531e-06,
      "loss": 0.9417,
      "step": 785
    },
    {
      "epoch": 0.7247706422018348,
      "grad_norm": 1.4183545202083996,
      "learning_rate": 4.270795685155001e-06,
      "loss": 0.9499,
      "step": 790
    },
    {
      "epoch": 0.7293577981651376,
      "grad_norm": 1.3205369291505886,
      "learning_rate": 4.140298023713416e-06,
      "loss": 0.9323,
      "step": 795
    },
    {
      "epoch": 0.7339449541284404,
      "grad_norm": 1.370701834137729,
      "learning_rate": 4.0113027014349374e-06,
      "loss": 0.9168,
      "step": 800
    },
    {
      "epoch": 0.7385321100917431,
      "grad_norm": 1.3042672508858097,
      "learning_rate": 3.883842790773647e-06,
      "loss": 0.9592,
      "step": 805
    },
    {
      "epoch": 0.7431192660550459,
      "grad_norm": 1.3064123913534935,
      "learning_rate": 3.757950970527249e-06,
      "loss": 0.9284,
      "step": 810
    },
    {
      "epoch": 0.7477064220183486,
      "grad_norm": 1.3065430427090394,
      "learning_rate": 3.633659517458736e-06,
      "loss": 0.9273,
      "step": 815
    },
    {
      "epoch": 0.7522935779816514,
      "grad_norm": 1.2703070702836412,
      "learning_rate": 3.511000298021098e-06,
      "loss": 0.9253,
      "step": 820
    },
    {
      "epoch": 0.7568807339449541,
      "grad_norm": 1.3398945194343388,
      "learning_rate": 3.39000476018726e-06,
      "loss": 0.9288,
      "step": 825
    },
    {
      "epoch": 0.7614678899082569,
      "grad_norm": 1.2778852672739778,
      "learning_rate": 3.2707039253872796e-06,
      "loss": 0.9254,
      "step": 830
    },
    {
      "epoch": 0.7660550458715596,
      "grad_norm": 1.3303966067925974,
      "learning_rate": 3.153128380554941e-06,
      "loss": 0.9338,
      "step": 835
    },
    {
      "epoch": 0.7706422018348624,
      "grad_norm": 1.3030003909557433,
      "learning_rate": 3.037308270285709e-06,
      "loss": 0.925,
      "step": 840
    },
    {
      "epoch": 0.7752293577981652,
      "grad_norm": 1.2744506690007673,
      "learning_rate": 2.923273289108115e-06,
      "loss": 0.9428,
      "step": 845
    },
    {
      "epoch": 0.7798165137614679,
      "grad_norm": 1.2923362843968602,
      "learning_rate": 2.8110526738705345e-06,
      "loss": 0.9262,
      "step": 850
    },
    {
      "epoch": 0.7844036697247706,
      "grad_norm": 1.307141046764321,
      "learning_rate": 2.700675196245288e-06,
      "loss": 0.9419,
      "step": 855
    },
    {
      "epoch": 0.7889908256880734,
      "grad_norm": 1.272783570802427,
      "learning_rate": 2.592169155352031e-06,
      "loss": 0.9431,
      "step": 860
    },
    {
      "epoch": 0.7935779816513762,
      "grad_norm": 1.2769597385673763,
      "learning_rate": 2.485562370502279e-06,
      "loss": 0.9458,
      "step": 865
    },
    {
      "epoch": 0.7981651376146789,
      "grad_norm": 1.348054241772535,
      "learning_rate": 2.3808821740669608e-06,
      "loss": 0.9381,
      "step": 870
    },
    {
      "epoch": 0.8027522935779816,
      "grad_norm": 1.3328586135999967,
      "learning_rate": 2.2781554044688015e-06,
      "loss": 0.9402,
      "step": 875
    },
    {
      "epoch": 0.8073394495412844,
      "grad_norm": 1.3091185173403772,
      "learning_rate": 2.1774083993013715e-06,
      "loss": 0.9376,
      "step": 880
    },
    {
      "epoch": 0.8119266055045872,
      "grad_norm": 1.3126841182660047,
      "learning_rate": 2.0786669885765044e-06,
      "loss": 0.9286,
      "step": 885
    },
    {
      "epoch": 0.8165137614678899,
      "grad_norm": 1.299660265289108,
      "learning_rate": 1.981956488101898e-06,
      "loss": 0.9419,
      "step": 890
    },
    {
      "epoch": 0.8211009174311926,
      "grad_norm": 1.331079398798441,
      "learning_rate": 1.8873016929904942e-06,
      "loss": 0.9444,
      "step": 895
    },
    {
      "epoch": 0.8256880733944955,
      "grad_norm": 1.2590818401281205,
      "learning_rate": 1.7947268713034128e-06,
      "loss": 0.9136,
      "step": 900
    },
    {
      "epoch": 0.8302752293577982,
      "grad_norm": 1.26927535411352,
      "learning_rate": 1.704255757827963e-06,
      "loss": 0.9175,
      "step": 905
    },
    {
      "epoch": 0.8348623853211009,
      "grad_norm": 1.3210015666942456,
      "learning_rate": 1.6159115479924259e-06,
      "loss": 0.9328,
      "step": 910
    },
    {
      "epoch": 0.8394495412844036,
      "grad_norm": 1.3123997636377258,
      "learning_rate": 1.529716891919074e-06,
      "loss": 0.9416,
      "step": 915
    },
    {
      "epoch": 0.8440366972477065,
      "grad_norm": 1.2908364394847265,
      "learning_rate": 1.4456938886170413e-06,
      "loss": 0.9129,
      "step": 920
    },
    {
      "epoch": 0.8486238532110092,
      "grad_norm": 1.3182702591467457,
      "learning_rate": 1.3638640803164516e-06,
      "loss": 0.9261,
      "step": 925
    },
    {
      "epoch": 0.8532110091743119,
      "grad_norm": 1.4674739471762706,
      "learning_rate": 1.2842484469453365e-06,
      "loss": 0.9159,
      "step": 930
    },
    {
      "epoch": 0.8577981651376146,
      "grad_norm": 1.3130778393525182,
      "learning_rate": 1.2068674007506787e-06,
      "loss": 0.9304,
      "step": 935
    },
    {
      "epoch": 0.8623853211009175,
      "grad_norm": 1.3697197759877109,
      "learning_rate": 1.1317407810650372e-06,
      "loss": 0.9289,
      "step": 940
    },
    {
      "epoch": 0.8669724770642202,
      "grad_norm": 1.2668626903844098,
      "learning_rate": 1.0588878492200261e-06,
      "loss": 0.9349,
      "step": 945
    },
    {
      "epoch": 0.8715596330275229,
      "grad_norm": 1.2955211201170846,
      "learning_rate": 9.883272836080116e-07,
      "loss": 0.9221,
      "step": 950
    },
    {
      "epoch": 0.8761467889908257,
      "grad_norm": 1.30796302659334,
      "learning_rate": 9.200771748932513e-07,
      "loss": 0.935,
      "step": 955
    },
    {
      "epoch": 0.8807339449541285,
      "grad_norm": 1.302371050855527,
      "learning_rate": 8.541550213737171e-07,
      "loss": 0.9131,
      "step": 960
    },
    {
      "epoch": 0.8853211009174312,
      "grad_norm": 1.2686626009853337,
      "learning_rate": 7.905777244947954e-07,
      "loss": 0.9196,
      "step": 965
    },
    {
      "epoch": 0.8899082568807339,
      "grad_norm": 1.33952018651438,
      "learning_rate": 7.293615845160196e-07,
      "loss": 0.9303,
      "step": 970
    },
    {
      "epoch": 0.8944954128440367,
      "grad_norm": 1.3142004093323376,
      "learning_rate": 6.705222963319191e-07,
      "loss": 0.9406,
      "step": 975
    },
    {
      "epoch": 0.8990825688073395,
      "grad_norm": 1.2910709983882336,
      "learning_rate": 6.140749454480932e-07,
      "loss": 0.9134,
      "step": 980
    },
    {
      "epoch": 0.9036697247706422,
      "grad_norm": 1.3052663290487918,
      "learning_rate": 5.600340041135133e-07,
      "loss": 0.9199,
      "step": 985
    },
    {
      "epoch": 0.908256880733945,
      "grad_norm": 1.3197825777884429,
      "learning_rate": 5.0841332761005e-07,
      "loss": 0.9351,
      "step": 990
    },
    {
      "epoch": 0.9128440366972477,
      "grad_norm": 1.2050266272600134,
      "learning_rate": 4.592261507001994e-07,
      "loss": 0.9164,
      "step": 995
    },
    {
      "epoch": 0.9174311926605505,
      "grad_norm": 1.2774181379109981,
      "learning_rate": 4.124850842338779e-07,
      "loss": 0.9224,
      "step": 1000
    },
    {
      "epoch": 0.9220183486238532,
      "grad_norm": 1.2851660562840292,
      "learning_rate": 3.6820211191520127e-07,
      "loss": 0.9261,
      "step": 1005
    },
    {
      "epoch": 0.926605504587156,
      "grad_norm": 1.3016157142054157,
      "learning_rate": 3.263885872300343e-07,
      "loss": 0.9252,
      "step": 1010
    },
    {
      "epoch": 0.9311926605504587,
      "grad_norm": 1.2470330025991352,
      "learning_rate": 2.870552305351382e-07,
      "loss": 0.903,
      "step": 1015
    },
    {
      "epoch": 0.9357798165137615,
      "grad_norm": 1.2776227693612163,
      "learning_rate": 2.5021212630962246e-07,
      "loss": 0.9297,
      "step": 1020
    },
    {
      "epoch": 0.9403669724770642,
      "grad_norm": 1.2381197028921025,
      "learning_rate": 2.158687205694443e-07,
      "loss": 0.9094,
      "step": 1025
    },
    {
      "epoch": 0.944954128440367,
      "grad_norm": 1.2836110047083185,
      "learning_rate": 1.840338184455881e-07,
      "loss": 0.9294,
      "step": 1030
    },
    {
      "epoch": 0.9495412844036697,
      "grad_norm": 1.2718038074603693,
      "learning_rate": 1.5471558192656776e-07,
      "loss": 0.9187,
      "step": 1035
    },
    {
      "epoch": 0.9541284403669725,
      "grad_norm": 1.241258270519395,
      "learning_rate": 1.279215277658097e-07,
      "loss": 0.9341,
      "step": 1040
    },
    {
      "epoch": 0.9587155963302753,
      "grad_norm": 1.305186814508952,
      "learning_rate": 1.0365852555447642e-07,
      "loss": 0.9275,
      "step": 1045
    },
    {
      "epoch": 0.963302752293578,
      "grad_norm": 1.2717320188551713,
      "learning_rate": 8.19327959602012e-08,
      "loss": 0.9371,
      "step": 1050
    },
    {
      "epoch": 0.9678899082568807,
      "grad_norm": 1.284871662304039,
      "learning_rate": 6.274990913221035e-08,
      "loss": 0.9311,
      "step": 1055
    },
    {
      "epoch": 0.9724770642201835,
      "grad_norm": 1.2652485475313338,
      "learning_rate": 4.6114783273213395e-08,
      "loss": 0.9158,
      "step": 1060
    },
    {
      "epoch": 0.9770642201834863,
      "grad_norm": 1.2767791748309611,
      "learning_rate": 3.203168337845508e-08,
      "loss": 0.9089,
      "step": 1065
    },
    {
      "epoch": 0.981651376146789,
      "grad_norm": 1.2768060229936837,
      "learning_rate": 2.05042201422323e-08,
      "loss": 0.9185,
      "step": 1070
    },
    {
      "epoch": 0.9862385321100917,
      "grad_norm": 1.3226366887213121,
      "learning_rate": 1.1535349032167908e-08,
      "loss": 0.9011,
      "step": 1075
    },
    {
      "epoch": 0.9908256880733946,
      "grad_norm": 1.2648568877097428,
      "learning_rate": 5.127369531473525e-09,
      "loss": 0.9305,
      "step": 1080
    },
    {
      "epoch": 0.9954128440366973,
      "grad_norm": 1.2794584435070722,
      "learning_rate": 1.2819245493955746e-09,
      "loss": 0.9269,
      "step": 1085
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.381828304372254,
      "learning_rate": 0.0,
      "loss": 0.923,
      "step": 1090
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.9389663338661194,
      "eval_runtime": 334.0997,
      "eval_samples_per_second": 46.187,
      "eval_steps_per_second": 0.724,
      "step": 1090
    },
    {
      "epoch": 1.0,
      "step": 1090,
      "total_flos": 456447649382400.0,
      "train_loss": 0.9810463331161289,
      "train_runtime": 13406.7569,
      "train_samples_per_second": 10.4,
      "train_steps_per_second": 0.081
    }
  ],
  "logging_steps": 5,
  "max_steps": 1090,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 456447649382400.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}