{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9961919268849961,
  "eval_steps": 500,
  "global_step": 218,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00913937547600914,
      "grad_norm": 3.3369264602661133,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.6708,
      "step": 1
    },
    {
      "epoch": 0.01827875095201828,
      "grad_norm": 3.4893620014190674,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.6794,
      "step": 2
    },
    {
      "epoch": 0.027418126428027417,
      "grad_norm": 3.4242701530456543,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6599,
      "step": 3
    },
    {
      "epoch": 0.03655750190403656,
      "grad_norm": 3.155944347381592,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.6689,
      "step": 4
    },
    {
      "epoch": 0.0456968773800457,
      "grad_norm": 2.458157539367676,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.6512,
      "step": 5
    },
    {
      "epoch": 0.05483625285605483,
      "grad_norm": 1.8310825824737549,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.6187,
      "step": 6
    },
    {
      "epoch": 0.06397562833206398,
      "grad_norm": 1.7105038166046143,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.6179,
      "step": 7
    },
    {
      "epoch": 0.07311500380807312,
      "grad_norm": 1.7482938766479492,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.6075,
      "step": 8
    },
    {
      "epoch": 0.08225437928408226,
      "grad_norm": 1.6708910465240479,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.5964,
      "step": 9
    },
    {
      "epoch": 0.0913937547600914,
      "grad_norm": 1.1437326669692993,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5533,
      "step": 10
    },
    {
      "epoch": 0.10053313023610053,
      "grad_norm": 1.5597337484359741,
      "learning_rate": 1e-05,
      "loss": 0.5382,
      "step": 11
    },
    {
      "epoch": 0.10967250571210967,
      "grad_norm": 1.0245758295059204,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5104,
      "step": 12
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 0.8286274671554565,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5173,
      "step": 13
    },
    {
      "epoch": 0.12795125666412796,
      "grad_norm": 1.0838040113449097,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.5133,
      "step": 14
    },
    {
      "epoch": 0.1370906321401371,
      "grad_norm": 1.0685889720916748,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.5109,
      "step": 15
    },
    {
      "epoch": 0.14623000761614624,
      "grad_norm": 0.794349193572998,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.4618,
      "step": 16
    },
    {
      "epoch": 0.15536938309215537,
      "grad_norm": 0.8422227501869202,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.4867,
      "step": 17
    },
    {
      "epoch": 0.16450875856816452,
      "grad_norm": 0.7492879629135132,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4731,
      "step": 18
    },
    {
      "epoch": 0.17364813404417365,
      "grad_norm": 0.6844476461410522,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.4439,
      "step": 19
    },
    {
      "epoch": 0.1827875095201828,
      "grad_norm": 0.7250885367393494,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.4716,
      "step": 20
    },
    {
      "epoch": 0.19192688499619193,
      "grad_norm": 0.67299485206604,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4412,
      "step": 21
    },
    {
      "epoch": 0.20106626047220105,
      "grad_norm": 0.6760995388031006,
      "learning_rate": 2e-05,
      "loss": 0.4459,
      "step": 22
    },
    {
      "epoch": 0.2102056359482102,
      "grad_norm": 0.5327174067497253,
      "learning_rate": 1.9998715457999313e-05,
      "loss": 0.45,
      "step": 23
    },
    {
      "epoch": 0.21934501142421933,
      "grad_norm": 0.5728371143341064,
      "learning_rate": 1.999486216200688e-05,
      "loss": 0.4362,
      "step": 24
    },
    {
      "epoch": 0.2284843869002285,
      "grad_norm": 0.4978387951850891,
      "learning_rate": 1.9988441101966807e-05,
      "loss": 0.4562,
      "step": 25
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 0.5106445550918579,
      "learning_rate": 1.9979453927503366e-05,
      "loss": 0.4684,
      "step": 26
    },
    {
      "epoch": 0.24676313785224677,
      "grad_norm": 0.4982711374759674,
      "learning_rate": 1.9967902947497158e-05,
      "loss": 0.4526,
      "step": 27
    },
    {
      "epoch": 0.2559025133282559,
      "grad_norm": 0.5087048411369324,
      "learning_rate": 1.9953791129491985e-05,
      "loss": 0.4468,
      "step": 28
    },
    {
      "epoch": 0.26504188880426505,
      "grad_norm": 0.47533684968948364,
      "learning_rate": 1.9937122098932428e-05,
      "loss": 0.4272,
      "step": 29
    },
    {
      "epoch": 0.2741812642802742,
      "grad_norm": 0.4664396643638611,
      "learning_rate": 1.991790013823246e-05,
      "loss": 0.4456,
      "step": 30
    },
    {
      "epoch": 0.2833206397562833,
      "grad_norm": 0.44873371720314026,
      "learning_rate": 1.9896130185675263e-05,
      "loss": 0.4436,
      "step": 31
    },
    {
      "epoch": 0.2924600152322925,
      "grad_norm": 0.46020084619522095,
      "learning_rate": 1.9871817834144506e-05,
      "loss": 0.4285,
      "step": 32
    },
    {
      "epoch": 0.3015993907083016,
      "grad_norm": 0.6484828591346741,
      "learning_rate": 1.9844969329687526e-05,
      "loss": 0.4385,
      "step": 33
    },
    {
      "epoch": 0.31073876618431073,
      "grad_norm": 0.40165239572525024,
      "learning_rate": 1.9815591569910654e-05,
      "loss": 0.4159,
      "step": 34
    },
    {
      "epoch": 0.31987814166031986,
      "grad_norm": 0.42455559968948364,
      "learning_rate": 1.9783692102207156e-05,
      "loss": 0.4407,
      "step": 35
    },
    {
      "epoch": 0.32901751713632904,
      "grad_norm": 0.4265769124031067,
      "learning_rate": 1.9749279121818235e-05,
      "loss": 0.3885,
      "step": 36
    },
    {
      "epoch": 0.33815689261233817,
      "grad_norm": 0.38099807500839233,
      "learning_rate": 1.971236146972764e-05,
      "loss": 0.3883,
      "step": 37
    },
    {
      "epoch": 0.3472962680883473,
      "grad_norm": 0.43669813871383667,
      "learning_rate": 1.9672948630390296e-05,
      "loss": 0.3881,
      "step": 38
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 0.39924174547195435,
      "learning_rate": 1.9631050729295705e-05,
      "loss": 0.4378,
      "step": 39
    },
    {
      "epoch": 0.3655750190403656,
      "grad_norm": 0.44854018092155457,
      "learning_rate": 1.9586678530366607e-05,
      "loss": 0.4125,
      "step": 40
    },
    {
      "epoch": 0.3747143945163747,
      "grad_norm": 0.3688861131668091,
      "learning_rate": 1.953984343319364e-05,
      "loss": 0.3944,
      "step": 41
    },
    {
      "epoch": 0.38385376999238385,
      "grad_norm": 0.3711301386356354,
      "learning_rate": 1.949055747010669e-05,
      "loss": 0.4164,
      "step": 42
    },
    {
      "epoch": 0.392993145468393,
      "grad_norm": 0.4008907675743103,
      "learning_rate": 1.9438833303083677e-05,
      "loss": 0.4196,
      "step": 43
    },
    {
      "epoch": 0.4021325209444021,
      "grad_norm": 0.42354318499565125,
      "learning_rate": 1.9384684220497605e-05,
      "loss": 0.3882,
      "step": 44
    },
    {
      "epoch": 0.4112718964204113,
      "grad_norm": 0.42434418201446533,
      "learning_rate": 1.932812413370265e-05,
      "loss": 0.4301,
      "step": 45
    },
    {
      "epoch": 0.4204112718964204,
      "grad_norm": 0.4077893793582916,
      "learning_rate": 1.926916757346022e-05,
      "loss": 0.3943,
      "step": 46
    },
    {
      "epoch": 0.42955064737242954,
      "grad_norm": 0.3807850182056427,
      "learning_rate": 1.9207829686205882e-05,
      "loss": 0.3888,
      "step": 47
    },
    {
      "epoch": 0.43869002284843867,
      "grad_norm": 0.3933984637260437,
      "learning_rate": 1.9144126230158127e-05,
      "loss": 0.4084,
      "step": 48
    },
    {
      "epoch": 0.44782939832444785,
      "grad_norm": 0.45636460185050964,
      "learning_rate": 1.9078073571269922e-05,
      "loss": 0.3892,
      "step": 49
    },
    {
      "epoch": 0.456968773800457,
      "grad_norm": 0.3460674583911896,
      "learning_rate": 1.900968867902419e-05,
      "loss": 0.3911,
      "step": 50
    },
    {
      "epoch": 0.4661081492764661,
      "grad_norm": 0.40278884768486023,
      "learning_rate": 1.8938989122074195e-05,
      "loss": 0.3985,
      "step": 51
    },
    {
      "epoch": 0.4752475247524752,
      "grad_norm": 0.4178655743598938,
      "learning_rate": 1.8865993063730003e-05,
      "loss": 0.4156,
      "step": 52
    },
    {
      "epoch": 0.4843869002284844,
      "grad_norm": 0.3947036862373352,
      "learning_rate": 1.8790719257292175e-05,
      "loss": 0.4189,
      "step": 53
    },
    {
      "epoch": 0.49352627570449353,
      "grad_norm": 0.3686043322086334,
      "learning_rate": 1.8713187041233896e-05,
      "loss": 0.4105,
      "step": 54
    },
    {
      "epoch": 0.5026656511805027,
      "grad_norm": 0.4284152090549469,
      "learning_rate": 1.8633416334232754e-05,
      "loss": 0.389,
      "step": 55
    },
    {
      "epoch": 0.5118050266565118,
      "grad_norm": 0.40143266320228577,
      "learning_rate": 1.8551427630053464e-05,
      "loss": 0.4168,
      "step": 56
    },
    {
      "epoch": 0.5209444021325209,
      "grad_norm": 0.37129855155944824,
      "learning_rate": 1.8467241992282842e-05,
      "loss": 0.3745,
      "step": 57
    },
    {
      "epoch": 0.5300837776085301,
      "grad_norm": 0.4992600679397583,
      "learning_rate": 1.8380881048918406e-05,
      "loss": 0.4024,
      "step": 58
    },
    {
      "epoch": 0.5392231530845393,
      "grad_norm": 0.403962641954422,
      "learning_rate": 1.8292366986811952e-05,
      "loss": 0.379,
      "step": 59
    },
    {
      "epoch": 0.5483625285605483,
      "grad_norm": 0.3711932301521301,
      "learning_rate": 1.820172254596956e-05,
      "loss": 0.3959,
      "step": 60
    },
    {
      "epoch": 0.5575019040365575,
      "grad_norm": 0.45666763186454773,
      "learning_rate": 1.8108971013709512e-05,
      "loss": 0.3833,
      "step": 61
    },
    {
      "epoch": 0.5666412795125666,
      "grad_norm": 0.34401726722717285,
      "learning_rate": 1.8014136218679566e-05,
      "loss": 0.3847,
      "step": 62
    },
    {
      "epoch": 0.5757806549885758,
      "grad_norm": 0.36564773321151733,
      "learning_rate": 1.79172425247352e-05,
      "loss": 0.384,
      "step": 63
    },
    {
      "epoch": 0.584920030464585,
      "grad_norm": 0.3763687014579773,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.3827,
      "step": 64
    },
    {
      "epoch": 0.594059405940594,
      "grad_norm": 0.3825934827327728,
      "learning_rate": 1.771737853387202e-05,
      "loss": 0.3971,
      "step": 65
    },
    {
      "epoch": 0.6031987814166032,
      "grad_norm": 0.3348219394683838,
      "learning_rate": 1.7614459583691346e-05,
      "loss": 0.3965,
      "step": 66
    },
    {
      "epoch": 0.6123381568926123,
      "grad_norm": 0.3712629973888397,
      "learning_rate": 1.7509584414881114e-05,
      "loss": 0.3933,
      "step": 67
    },
    {
      "epoch": 0.6214775323686215,
      "grad_norm": 0.3879682123661041,
      "learning_rate": 1.7402779970753156e-05,
      "loss": 0.4008,
      "step": 68
    },
    {
      "epoch": 0.6306169078446306,
      "grad_norm": 0.3735283315181732,
      "learning_rate": 1.7294073690266343e-05,
      "loss": 0.4058,
      "step": 69
    },
    {
      "epoch": 0.6397562833206397,
      "grad_norm": 0.36928698420524597,
      "learning_rate": 1.7183493500977277e-05,
      "loss": 0.3911,
      "step": 70
    },
    {
      "epoch": 0.6488956587966489,
      "grad_norm": 0.32693177461624146,
      "learning_rate": 1.7071067811865477e-05,
      "loss": 0.389,
      "step": 71
    },
    {
      "epoch": 0.6580350342726581,
      "grad_norm": 0.35049450397491455,
      "learning_rate": 1.6956825506034866e-05,
      "loss": 0.3824,
      "step": 72
    },
    {
      "epoch": 0.6671744097486672,
      "grad_norm": 0.3910599946975708,
      "learning_rate": 1.6840795933293464e-05,
      "loss": 0.3797,
      "step": 73
    },
    {
      "epoch": 0.6763137852246763,
      "grad_norm": 0.34450212121009827,
      "learning_rate": 1.672300890261317e-05,
      "loss": 0.3769,
      "step": 74
    },
    {
      "epoch": 0.6854531607006854,
      "grad_norm": 0.3638371229171753,
      "learning_rate": 1.6603494674471595e-05,
      "loss": 0.3765,
      "step": 75
    },
    {
      "epoch": 0.6945925361766946,
      "grad_norm": 0.3630662262439728,
      "learning_rate": 1.6482283953077887e-05,
      "loss": 0.402,
      "step": 76
    },
    {
      "epoch": 0.7037319116527038,
      "grad_norm": 0.3464076519012451,
      "learning_rate": 1.635940787848455e-05,
      "loss": 0.3776,
      "step": 77
    },
    {
      "epoch": 0.7128712871287128,
      "grad_norm": 0.35827362537384033,
      "learning_rate": 1.6234898018587336e-05,
      "loss": 0.381,
      "step": 78
    },
    {
      "epoch": 0.722010662604722,
      "grad_norm": 0.3732692301273346,
      "learning_rate": 1.6108786361015145e-05,
      "loss": 0.3723,
      "step": 79
    },
    {
      "epoch": 0.7311500380807312,
      "grad_norm": 0.3525165617465973,
      "learning_rate": 1.598110530491216e-05,
      "loss": 0.4152,
      "step": 80
    },
    {
      "epoch": 0.7402894135567403,
      "grad_norm": 0.39010149240493774,
      "learning_rate": 1.5851887652614238e-05,
      "loss": 0.3937,
      "step": 81
    },
    {
      "epoch": 0.7494287890327495,
      "grad_norm": 0.349071741104126,
      "learning_rate": 1.5721166601221697e-05,
      "loss": 0.4073,
      "step": 82
    },
    {
      "epoch": 0.7585681645087585,
      "grad_norm": 0.33521854877471924,
      "learning_rate": 1.5588975734070717e-05,
      "loss": 0.3756,
      "step": 83
    },
    {
      "epoch": 0.7677075399847677,
      "grad_norm": 0.36783722043037415,
      "learning_rate": 1.5455349012105488e-05,
      "loss": 0.3902,
      "step": 84
    },
    {
      "epoch": 0.7768469154607769,
      "grad_norm": 0.3330143690109253,
      "learning_rate": 1.5320320765153367e-05,
      "loss": 0.3879,
      "step": 85
    },
    {
      "epoch": 0.785986290936786,
      "grad_norm": 0.35695841908454895,
      "learning_rate": 1.5183925683105254e-05,
      "loss": 0.3887,
      "step": 86
    },
    {
      "epoch": 0.7951256664127951,
      "grad_norm": 0.35579657554626465,
      "learning_rate": 1.504619880700346e-05,
      "loss": 0.3859,
      "step": 87
    },
    {
      "epoch": 0.8042650418888042,
      "grad_norm": 0.3615005314350128,
      "learning_rate": 1.4907175520039381e-05,
      "loss": 0.381,
      "step": 88
    },
    {
      "epoch": 0.8134044173648134,
      "grad_norm": 0.37881505489349365,
      "learning_rate": 1.4766891538463255e-05,
      "loss": 0.3798,
      "step": 89
    },
    {
      "epoch": 0.8225437928408226,
      "grad_norm": 0.3448372483253479,
      "learning_rate": 1.4625382902408356e-05,
      "loss": 0.376,
      "step": 90
    },
    {
      "epoch": 0.8316831683168316,
      "grad_norm": 0.34421786665916443,
      "learning_rate": 1.448268596663197e-05,
      "loss": 0.3692,
      "step": 91
    },
    {
      "epoch": 0.8408225437928408,
      "grad_norm": 0.32549339532852173,
      "learning_rate": 1.4338837391175582e-05,
      "loss": 0.385,
      "step": 92
    },
    {
      "epoch": 0.84996191926885,
      "grad_norm": 0.34452754259109497,
      "learning_rate": 1.419387413194657e-05,
      "loss": 0.3711,
      "step": 93
    },
    {
      "epoch": 0.8591012947448591,
      "grad_norm": 0.3506733477115631,
      "learning_rate": 1.4047833431223938e-05,
      "loss": 0.3772,
      "step": 94
    },
    {
      "epoch": 0.8682406702208683,
      "grad_norm": 0.3662183880805969,
      "learning_rate": 1.390075280809047e-05,
      "loss": 0.3791,
      "step": 95
    },
    {
      "epoch": 0.8773800456968773,
      "grad_norm": 0.3463532030582428,
      "learning_rate": 1.3752670048793744e-05,
      "loss": 0.3767,
      "step": 96
    },
    {
      "epoch": 0.8865194211728865,
      "grad_norm": 0.3397160768508911,
      "learning_rate": 1.3603623197038536e-05,
      "loss": 0.3831,
      "step": 97
    },
    {
      "epoch": 0.8956587966488957,
      "grad_norm": 0.3651241660118103,
      "learning_rate": 1.3453650544213078e-05,
      "loss": 0.3933,
      "step": 98
    },
    {
      "epoch": 0.9047981721249048,
      "grad_norm": 0.3426673710346222,
      "learning_rate": 1.3302790619551673e-05,
      "loss": 0.366,
      "step": 99
    },
    {
      "epoch": 0.913937547600914,
      "grad_norm": 0.32238805294036865,
      "learning_rate": 1.315108218023621e-05,
      "loss": 0.3824,
      "step": 100
    },
    {
      "epoch": 0.9230769230769231,
      "grad_norm": 0.3311628997325897,
      "learning_rate": 1.2998564201439117e-05,
      "loss": 0.3845,
      "step": 101
    },
    {
      "epoch": 0.9322162985529322,
      "grad_norm": 0.34884315729141235,
      "learning_rate": 1.2845275866310325e-05,
      "loss": 0.3819,
      "step": 102
    },
    {
      "epoch": 0.9413556740289414,
      "grad_norm": 0.32266736030578613,
      "learning_rate": 1.2691256555910769e-05,
      "loss": 0.3759,
      "step": 103
    },
    {
      "epoch": 0.9504950495049505,
      "grad_norm": 0.3028276264667511,
      "learning_rate": 1.2536545839095074e-05,
      "loss": 0.3963,
      "step": 104
    },
    {
      "epoch": 0.9596344249809596,
      "grad_norm": 0.3258204460144043,
      "learning_rate": 1.2381183462345983e-05,
      "loss": 0.3805,
      "step": 105
    },
    {
      "epoch": 0.9687738004569688,
      "grad_norm": 0.30626070499420166,
      "learning_rate": 1.2225209339563144e-05,
      "loss": 0.3781,
      "step": 106
    },
    {
      "epoch": 0.9779131759329779,
      "grad_norm": 0.29698455333709717,
      "learning_rate": 1.206866354180891e-05,
      "loss": 0.3777,
      "step": 107
    },
    {
      "epoch": 0.9870525514089871,
      "grad_norm": 0.30517199635505676,
      "learning_rate": 1.1911586287013726e-05,
      "loss": 0.3822,
      "step": 108
    },
    {
      "epoch": 0.9961919268849961,
      "grad_norm": 0.30879127979278564,
      "learning_rate": 1.1754017929643818e-05,
      "loss": 0.3761,
      "step": 109
    },
    {
      "epoch": 1.0053313023610053,
      "grad_norm": 0.547318160533905,
      "learning_rate": 1.1595998950333794e-05,
      "loss": 0.5449,
      "step": 110
    },
    {
      "epoch": 1.0144706778370145,
      "grad_norm": 0.37278491258621216,
      "learning_rate": 1.143756994548682e-05,
      "loss": 0.3479,
      "step": 111
    },
    {
      "epoch": 1.0236100533130237,
      "grad_norm": 0.35126781463623047,
      "learning_rate": 1.1278771616845061e-05,
      "loss": 0.338,
      "step": 112
    },
    {
      "epoch": 1.0327494287890326,
      "grad_norm": 0.37905386090278625,
      "learning_rate": 1.1119644761033079e-05,
      "loss": 0.3353,
      "step": 113
    },
    {
      "epoch": 1.0418888042650418,
      "grad_norm": 0.41645359992980957,
      "learning_rate": 1.0960230259076819e-05,
      "loss": 0.3257,
      "step": 114
    },
    {
      "epoch": 1.051028179741051,
      "grad_norm": 0.418813019990921,
      "learning_rate": 1.0800569065900935e-05,
      "loss": 0.3521,
      "step": 115
    },
    {
      "epoch": 1.0601675552170602,
      "grad_norm": 0.41467365622520447,
      "learning_rate": 1.064070219980713e-05,
      "loss": 0.3117,
      "step": 116
    },
    {
      "epoch": 1.0693069306930694,
      "grad_norm": 0.4041651487350464,
      "learning_rate": 1.0480670731936209e-05,
      "loss": 0.3093,
      "step": 117
    },
    {
      "epoch": 1.0784463061690786,
      "grad_norm": 0.3533031642436981,
      "learning_rate": 1.0320515775716556e-05,
      "loss": 0.2998,
      "step": 118
    },
    {
      "epoch": 1.0875856816450875,
      "grad_norm": 0.41691580414772034,
      "learning_rate": 1.0160278476301739e-05,
      "loss": 0.3737,
      "step": 119
    },
    {
      "epoch": 1.0967250571210967,
      "grad_norm": 0.36301714181900024,
      "learning_rate": 1e-05,
      "loss": 0.3267,
      "step": 120
    },
    {
      "epoch": 1.1058644325971059,
      "grad_norm": 0.3264053761959076,
      "learning_rate": 9.839721523698265e-06,
      "loss": 0.2584,
      "step": 121
    },
    {
      "epoch": 1.115003808073115,
      "grad_norm": 0.4043513238430023,
      "learning_rate": 9.67948422428345e-06,
      "loss": 0.3526,
      "step": 122
    },
    {
      "epoch": 1.1241431835491242,
      "grad_norm": 0.3653642237186432,
      "learning_rate": 9.519329268063795e-06,
      "loss": 0.3425,
      "step": 123
    },
    {
      "epoch": 1.1332825590251332,
      "grad_norm": 0.33410775661468506,
      "learning_rate": 9.359297800192873e-06,
      "loss": 0.3097,
      "step": 124
    },
    {
      "epoch": 1.1424219345011424,
      "grad_norm": 0.340742826461792,
      "learning_rate": 9.199430934099068e-06,
      "loss": 0.3052,
      "step": 125
    },
    {
      "epoch": 1.1515613099771516,
      "grad_norm": 0.3352021872997284,
      "learning_rate": 9.039769740923183e-06,
      "loss": 0.2977,
      "step": 126
    },
    {
      "epoch": 1.1607006854531607,
      "grad_norm": 0.3389112949371338,
      "learning_rate": 8.880355238966923e-06,
      "loss": 0.3113,
      "step": 127
    },
    {
      "epoch": 1.16984006092917,
      "grad_norm": 0.3363248407840729,
      "learning_rate": 8.721228383154939e-06,
      "loss": 0.3205,
      "step": 128
    },
    {
      "epoch": 1.178979436405179,
      "grad_norm": 0.34208640456199646,
      "learning_rate": 8.562430054513184e-06,
      "loss": 0.3176,
      "step": 129
    },
    {
      "epoch": 1.188118811881188,
      "grad_norm": 0.3295469284057617,
      "learning_rate": 8.404001049666211e-06,
      "loss": 0.3201,
      "step": 130
    },
    {
      "epoch": 1.1972581873571972,
      "grad_norm": 0.3238345682621002,
      "learning_rate": 8.245982070356186e-06,
      "loss": 0.297,
      "step": 131
    },
    {
      "epoch": 1.2063975628332064,
      "grad_norm": 0.3155887722969055,
      "learning_rate": 8.08841371298628e-06,
      "loss": 0.3163,
      "step": 132
    },
    {
      "epoch": 1.2155369383092156,
      "grad_norm": 0.30575722455978394,
      "learning_rate": 7.931336458191092e-06,
      "loss": 0.3421,
      "step": 133
    },
    {
      "epoch": 1.2246763137852246,
      "grad_norm": 0.31150344014167786,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.3097,
      "step": 134
    },
    {
      "epoch": 1.2338156892612338,
      "grad_norm": 0.32414835691452026,
      "learning_rate": 7.618816537654018e-06,
      "loss": 0.3201,
      "step": 135
    },
    {
      "epoch": 1.242955064737243,
      "grad_norm": 0.2960313558578491,
      "learning_rate": 7.463454160904928e-06,
      "loss": 0.2807,
      "step": 136
    },
    {
      "epoch": 1.2520944402132521,
      "grad_norm": 0.308651864528656,
      "learning_rate": 7.308743444089232e-06,
      "loss": 0.3258,
      "step": 137
    },
    {
      "epoch": 1.2612338156892613,
      "grad_norm": 0.3236067593097687,
      "learning_rate": 7.154724133689677e-06,
      "loss": 0.2958,
      "step": 138
    },
    {
      "epoch": 1.2703731911652705,
      "grad_norm": 0.3053479492664337,
      "learning_rate": 7.001435798560884e-06,
      "loss": 0.321,
      "step": 139
    },
    {
      "epoch": 1.2795125666412794,
      "grad_norm": 0.3023087978363037,
      "learning_rate": 6.848917819763794e-06,
      "loss": 0.3006,
      "step": 140
    },
    {
      "epoch": 1.2886519421172886,
      "grad_norm": 0.3476320505142212,
      "learning_rate": 6.697209380448333e-06,
      "loss": 0.3311,
      "step": 141
    },
    {
      "epoch": 1.2977913175932978,
      "grad_norm": 0.3587212562561035,
      "learning_rate": 6.546349455786926e-06,
      "loss": 0.3646,
      "step": 142
    },
    {
      "epoch": 1.306930693069307,
      "grad_norm": 0.28520774841308594,
      "learning_rate": 6.396376802961468e-06,
      "loss": 0.3165,
      "step": 143
    },
    {
      "epoch": 1.316070068545316,
      "grad_norm": 0.28903257846832275,
      "learning_rate": 6.24732995120626e-06,
      "loss": 0.3019,
      "step": 144
    },
    {
      "epoch": 1.3252094440213251,
      "grad_norm": 0.3147772252559662,
      "learning_rate": 6.099247191909532e-06,
      "loss": 0.2998,
      "step": 145
    },
    {
      "epoch": 1.3343488194973343,
      "grad_norm": 0.32797104120254517,
      "learning_rate": 5.952166568776062e-06,
      "loss": 0.3368,
      "step": 146
    },
    {
      "epoch": 1.3434881949733435,
      "grad_norm": 0.2969732880592346,
      "learning_rate": 5.806125868053433e-06,
      "loss": 0.2851,
      "step": 147
    },
    {
      "epoch": 1.3526275704493527,
      "grad_norm": 0.3113080561161041,
      "learning_rate": 5.66116260882442e-06,
      "loss": 0.2959,
      "step": 148
    },
    {
      "epoch": 1.3617669459253618,
      "grad_norm": 0.32516226172447205,
      "learning_rate": 5.517314033368031e-06,
      "loss": 0.3239,
      "step": 149
    },
    {
      "epoch": 1.370906321401371,
      "grad_norm": 0.32933881878852844,
      "learning_rate": 5.37461709759165e-06,
      "loss": 0.3288,
      "step": 150
    },
    {
      "epoch": 1.3838537699923839,
      "grad_norm": 0.29292726516723633,
      "learning_rate": 5.233108461536749e-06,
      "loss": 0.3226,
      "step": 151
    },
    {
      "epoch": 1.392993145468393,
      "grad_norm": 0.3006382882595062,
      "learning_rate": 5.092824479960625e-06,
      "loss": 0.3039,
      "step": 152
    },
    {
      "epoch": 1.4021325209444022,
      "grad_norm": 0.29254230856895447,
      "learning_rate": 4.9538011929965436e-06,
      "loss": 0.3441,
      "step": 153
    },
    {
      "epoch": 1.4112718964204114,
      "grad_norm": 0.30929845571517944,
      "learning_rate": 4.81607431689475e-06,
      "loss": 0.3127,
      "step": 154
    },
    {
      "epoch": 1.4204112718964204,
      "grad_norm": 0.28565043210983276,
      "learning_rate": 4.679679234846636e-06,
      "loss": 0.3119,
      "step": 155
    },
    {
      "epoch": 1.4295506473724295,
      "grad_norm": 0.3046690821647644,
      "learning_rate": 4.544650987894514e-06,
      "loss": 0.2909,
      "step": 156
    },
    {
      "epoch": 1.4386900228484387,
      "grad_norm": 0.279197096824646,
      "learning_rate": 4.411024265929283e-06,
      "loss": 0.3118,
      "step": 157
    },
    {
      "epoch": 1.447829398324448,
      "grad_norm": 0.29681769013404846,
      "learning_rate": 4.278833398778306e-06,
      "loss": 0.3122,
      "step": 158
    },
    {
      "epoch": 1.4569687738004569,
      "grad_norm": 0.28999167680740356,
      "learning_rate": 4.148112347385762e-06,
      "loss": 0.3073,
      "step": 159
    },
    {
      "epoch": 1.466108149276466,
      "grad_norm": 0.3117202818393707,
      "learning_rate": 4.01889469508784e-06,
      "loss": 0.3206,
      "step": 160
    },
    {
      "epoch": 1.4752475247524752,
      "grad_norm": 0.28692707419395447,
      "learning_rate": 3.891213638984858e-06,
      "loss": 0.3147,
      "step": 161
    },
    {
      "epoch": 1.4843869002284844,
      "grad_norm": 0.2778036296367645,
      "learning_rate": 3.7651019814126656e-06,
      "loss": 0.3249,
      "step": 162
    },
    {
      "epoch": 1.4935262757044936,
      "grad_norm": 0.30285775661468506,
      "learning_rate": 3.6405921215154492e-06,
      "loss": 0.3121,
      "step": 163
    },
    {
      "epoch": 1.5026656511805028,
      "grad_norm": 0.3020969033241272,
      "learning_rate": 3.5177160469221184e-06,
      "loss": 0.3003,
      "step": 164
    },
    {
      "epoch": 1.511805026656512,
      "grad_norm": 0.2875075340270996,
      "learning_rate": 3.3965053255284085e-06,
      "loss": 0.3115,
      "step": 165
    },
    {
      "epoch": 1.520944402132521,
      "grad_norm": 0.3074340522289276,
      "learning_rate": 3.2769910973868314e-06,
      "loss": 0.3131,
      "step": 166
    },
    {
      "epoch": 1.53008377760853,
      "grad_norm": 0.2820333242416382,
      "learning_rate": 3.1592040667065393e-06,
      "loss": 0.2978,
      "step": 167
    },
    {
      "epoch": 1.5392231530845393,
      "grad_norm": 0.3219679296016693,
      "learning_rate": 3.0431744939651365e-06,
      "loss": 0.3075,
      "step": 168
    },
    {
      "epoch": 1.5483625285605482,
      "grad_norm": 0.2894546389579773,
      "learning_rate": 2.9289321881345257e-06,
      "loss": 0.3107,
      "step": 169
    },
    {
      "epoch": 1.5575019040365574,
      "grad_norm": 0.33814314007759094,
      "learning_rate": 2.8165064990227255e-06,
      "loss": 0.3338,
      "step": 170
    },
    {
      "epoch": 1.5666412795125666,
      "grad_norm": 0.330655962228775,
      "learning_rate": 2.7059263097336595e-06,
      "loss": 0.3184,
      "step": 171
    },
    {
      "epoch": 1.5757806549885758,
      "grad_norm": 0.29267704486846924,
      "learning_rate": 2.597220029246846e-06,
      "loss": 0.3241,
      "step": 172
    },
    {
      "epoch": 1.584920030464585,
      "grad_norm": 0.30491986870765686,
      "learning_rate": 2.490415585118887e-06,
      "loss": 0.3033,
      "step": 173
    },
    {
      "epoch": 1.5940594059405941,
      "grad_norm": 0.30731192231178284,
      "learning_rate": 2.3855404163086558e-06,
      "loss": 0.2965,
      "step": 174
    },
    {
      "epoch": 1.6031987814166033,
      "grad_norm": 0.2904992699623108,
      "learning_rate": 2.282621466127982e-06,
      "loss": 0.3046,
      "step": 175
    },
    {
      "epoch": 1.6123381568926123,
      "grad_norm": 0.31819531321525574,
      "learning_rate": 2.1816851753197023e-06,
      "loss": 0.2998,
      "step": 176
    },
    {
      "epoch": 1.6214775323686215,
      "grad_norm": 0.30998897552490234,
      "learning_rate": 2.082757475264804e-06,
      "loss": 0.3047,
      "step": 177
    },
    {
      "epoch": 1.6306169078446306,
      "grad_norm": 0.2906753718852997,
      "learning_rate": 1.9858637813204352e-06,
      "loss": 0.2906,
      "step": 178
    },
    {
      "epoch": 1.6397562833206396,
      "grad_norm": 0.29853326082229614,
      "learning_rate": 1.8910289862904917e-06,
      "loss": 0.3223,
      "step": 179
    },
    {
      "epoch": 1.6488956587966488,
      "grad_norm": 0.31702253222465515,
      "learning_rate": 1.7982774540304404e-06,
      "loss": 0.3286,
      "step": 180
    },
    {
      "epoch": 1.658035034272658,
      "grad_norm": 0.2962207794189453,
      "learning_rate": 1.7076330131880525e-06,
      "loss": 0.3162,
      "step": 181
    },
    {
      "epoch": 1.6671744097486672,
      "grad_norm": 0.2774161100387573,
      "learning_rate": 1.6191189510815942e-06,
      "loss": 0.3147,
      "step": 182
    },
    {
      "epoch": 1.6763137852246763,
      "grad_norm": 0.2778419852256775,
      "learning_rate": 1.5327580077171589e-06,
      "loss": 0.3086,
      "step": 183
    },
    {
      "epoch": 1.6854531607006855,
      "grad_norm": 0.31520137190818787,
      "learning_rate": 1.4485723699465392e-06,
      "loss": 0.3259,
      "step": 184
    },
    {
      "epoch": 1.6945925361766947,
      "grad_norm": 0.29337337613105774,
      "learning_rate": 1.3665836657672493e-06,
      "loss": 0.3126,
      "step": 185
    },
    {
      "epoch": 1.7037319116527039,
      "grad_norm": 0.3039644956588745,
      "learning_rate": 1.286812958766106e-06,
      "loss": 0.3194,
      "step": 186
    },
    {
      "epoch": 1.7128712871287128,
      "grad_norm": 0.31187689304351807,
      "learning_rate": 1.209280742707828e-06,
      "loss": 0.3051,
      "step": 187
    },
    {
      "epoch": 1.722010662604722,
      "grad_norm": 0.30652114748954773,
      "learning_rate": 1.134006936269999e-06,
      "loss": 0.3267,
      "step": 188
    },
    {
      "epoch": 1.7311500380807312,
      "grad_norm": 0.33868396282196045,
      "learning_rate": 1.0610108779258043e-06,
      "loss": 0.3171,
      "step": 189
    },
    {
      "epoch": 1.7402894135567402,
      "grad_norm": 0.3216879963874817,
      "learning_rate": 9.903113209758098e-07,
      "loss": 0.2924,
      "step": 190
    },
    {
      "epoch": 1.7494287890327493,
      "grad_norm": 0.2990855872631073,
      "learning_rate": 9.2192642873008e-07,
      "loss": 0.3193,
      "step": 191
    },
    {
      "epoch": 1.7585681645087585,
      "grad_norm": 0.2846771478652954,
      "learning_rate": 8.558737698418762e-07,
      "loss": 0.2915,
      "step": 192
    },
    {
      "epoch": 1.7677075399847677,
      "grad_norm": 0.30743762850761414,
      "learning_rate": 7.921703137941172e-07,
      "loss": 0.3139,
      "step": 193
    },
    {
      "epoch": 1.7768469154607769,
      "grad_norm": 0.30846983194351196,
      "learning_rate": 7.308324265397837e-07,
      "loss": 0.31,
      "step": 194
    },
    {
      "epoch": 1.785986290936786,
      "grad_norm": 0.26738885045051575,
      "learning_rate": 6.718758662973524e-07,
      "loss": 0.3264,
      "step": 195
    },
    {
      "epoch": 1.7951256664127953,
      "grad_norm": 0.3010248839855194,
      "learning_rate": 6.153157795023956e-07,
      "loss": 0.3187,
      "step": 196
    },
    {
      "epoch": 1.8042650418888042,
      "grad_norm": 0.29657527804374695,
      "learning_rate": 5.611666969163243e-07,
      "loss": 0.3249,
      "step": 197
    },
    {
      "epoch": 1.8134044173648134,
      "grad_norm": 0.29564720392227173,
      "learning_rate": 5.094425298933136e-07,
      "loss": 0.3174,
      "step": 198
    },
    {
      "epoch": 1.8225437928408226,
      "grad_norm": 0.2934928834438324,
      "learning_rate": 4.6015656680636234e-07,
      "loss": 0.3278,
      "step": 199
    },
    {
      "epoch": 1.8316831683168315,
      "grad_norm": 0.2969776690006256,
      "learning_rate": 4.133214696333943e-07,
      "loss": 0.3167,
      "step": 200
    },
    {
      "epoch": 1.8408225437928407,
      "grad_norm": 0.3145069181919098,
      "learning_rate": 3.6894927070429744e-07,
      "loss": 0.3189,
      "step": 201
    },
    {
      "epoch": 1.84996191926885,
      "grad_norm": 0.2936561703681946,
      "learning_rate": 3.2705136960970554e-07,
      "loss": 0.3153,
      "step": 202
    },
    {
      "epoch": 1.859101294744859,
      "grad_norm": 0.3092540502548218,
      "learning_rate": 2.8763853027236277e-07,
      "loss": 0.3178,
      "step": 203
    },
    {
      "epoch": 1.8682406702208683,
      "grad_norm": 0.3078237473964691,
      "learning_rate": 2.507208781817638e-07,
      "loss": 0.3042,
      "step": 204
    },
    {
      "epoch": 1.8773800456968774,
      "grad_norm": 0.31245988607406616,
      "learning_rate": 2.1630789779284677e-07,
      "loss": 0.3121,
      "step": 205
    },
    {
      "epoch": 1.8865194211728866,
      "grad_norm": 0.29274481534957886,
      "learning_rate": 1.844084300893456e-07,
      "loss": 0.3083,
      "step": 206
    },
    {
      "epoch": 1.8956587966488958,
      "grad_norm": 0.27361243963241577,
      "learning_rate": 1.55030670312476e-07,
      "loss": 0.3198,
      "step": 207
    },
    {
      "epoch": 1.9047981721249048,
      "grad_norm": 0.28884297609329224,
      "learning_rate": 1.2818216585549824e-07,
      "loss": 0.3137,
      "step": 208
    },
    {
      "epoch": 1.913937547600914,
      "grad_norm": 0.3390906751155853,
      "learning_rate": 1.0386981432474075e-07,
      "loss": 0.3199,
      "step": 209
    },
    {
      "epoch": 1.9230769230769231,
      "grad_norm": 0.3025045096874237,
      "learning_rate": 8.209986176753947e-08,
      "loss": 0.3091,
      "step": 210
    },
    {
      "epoch": 1.932216298552932,
      "grad_norm": 0.30933359265327454,
      "learning_rate": 6.287790106757396e-08,
      "loss": 0.3267,
      "step": 211
    },
    {
      "epoch": 1.9413556740289413,
      "grad_norm": 0.3036198914051056,
      "learning_rate": 4.6208870508017703e-08,
      "loss": 0.3084,
      "step": 212
    },
    {
      "epoch": 1.9504950495049505,
      "grad_norm": 0.2938263416290283,
      "learning_rate": 3.2097052502843005e-08,
      "loss": 0.308,
      "step": 213
    },
    {
      "epoch": 1.9596344249809596,
      "grad_norm": 0.2950015664100647,
      "learning_rate": 2.054607249663665e-08,
      "loss": 0.2959,
      "step": 214
    },
    {
      "epoch": 1.9687738004569688,
      "grad_norm": 0.28699854016304016,
      "learning_rate": 1.1558898033191545e-08,
      "loss": 0.325,
      "step": 215
    },
    {
      "epoch": 1.977913175932978,
      "grad_norm": 0.3103504478931427,
      "learning_rate": 5.137837993121064e-09,
      "loss": 0.3208,
      "step": 216
    },
    {
      "epoch": 1.9870525514089872,
      "grad_norm": 0.30485469102859497,
      "learning_rate": 1.2845420006879494e-09,
      "loss": 0.3123,
      "step": 217
    },
    {
      "epoch": 1.9961919268849961,
      "grad_norm": 0.3055669367313385,
      "learning_rate": 0.0,
      "loss": 0.3137,
      "step": 218
    },
    {
      "epoch": 1.9961919268849961,
      "step": 218,
      "total_flos": 418179526819840.0,
      "train_loss": 0.09776416790047916,
      "train_runtime": 5460.3908,
      "train_samples_per_second": 3.846,
      "train_steps_per_second": 0.04
    }
  ],
  "logging_steps": 1,
  "max_steps": 218,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 418179526819840.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}