Bespoke-Stratos-32B / trainer_state.json
ryanmarten
Upload model
72f4b89 verified
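
This is the Trainer state saved alongside the checkpoint: log_history records epoch, grad_norm, learning_rate, and loss at each logged step (522 global steps over roughly 3 epochs, with the learning rate warming up to 1e-05 by step 53 and decaying afterwards). A minimal sketch, assuming the file has been downloaded locally as trainer_state.json (filename and usage are illustrative, not part of the checkpoint), for loading it and summarizing the loss curve:

# Minimal sketch: load trainer_state.json and summarize the logged loss.
# Assumes the file sits in the working directory; adjust the path as needed.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# log_history holds one dict per logged step:
# epoch, grad_norm, learning_rate, loss, step.
history = state["log_history"]
steps = [entry["step"] for entry in history if "loss" in entry]
losses = [entry["loss"] for entry in history if "loss" in entry]

print(f"global_step: {state['global_step']}, epoch: {state['epoch']:.3f}")
print(f"first/last logged loss: {losses[0]:.4f} -> {losses[-1]:.4f}")

The raw JSON follows.
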
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.998563906175203,
"eval_steps": 500,
"global_step": 522,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0057443752991862135,
"grad_norm": 2.7811572551727295,
"learning_rate": 1.886792452830189e-07,
"loss": 0.7552,
"step": 1
},
{
"epoch": 0.011488750598372427,
"grad_norm": 2.7668187618255615,
"learning_rate": 3.773584905660378e-07,
"loss": 0.7816,
"step": 2
},
{
"epoch": 0.01723312589755864,
"grad_norm": 2.714698314666748,
"learning_rate": 5.660377358490567e-07,
"loss": 0.8084,
"step": 3
},
{
"epoch": 0.022977501196744854,
"grad_norm": 2.6264641284942627,
"learning_rate": 7.547169811320755e-07,
"loss": 0.7774,
"step": 4
},
{
"epoch": 0.028721876495931067,
"grad_norm": 2.5013437271118164,
"learning_rate": 9.433962264150944e-07,
"loss": 0.7726,
"step": 5
},
{
"epoch": 0.03446625179511728,
"grad_norm": 2.7753398418426514,
"learning_rate": 1.1320754716981133e-06,
"loss": 0.7708,
"step": 6
},
{
"epoch": 0.040210627094303494,
"grad_norm": 2.3244540691375732,
"learning_rate": 1.3207547169811322e-06,
"loss": 0.7759,
"step": 7
},
{
"epoch": 0.04595500239348971,
"grad_norm": 1.9560602903366089,
"learning_rate": 1.509433962264151e-06,
"loss": 0.7493,
"step": 8
},
{
"epoch": 0.05169937769267592,
"grad_norm": 1.9135451316833496,
"learning_rate": 1.6981132075471698e-06,
"loss": 0.7216,
"step": 9
},
{
"epoch": 0.057443752991862135,
"grad_norm": 1.8730158805847168,
"learning_rate": 1.8867924528301889e-06,
"loss": 0.7732,
"step": 10
},
{
"epoch": 0.06318812829104835,
"grad_norm": 1.520505666732788,
"learning_rate": 2.075471698113208e-06,
"loss": 0.7432,
"step": 11
},
{
"epoch": 0.06893250359023456,
"grad_norm": 1.342366099357605,
"learning_rate": 2.2641509433962266e-06,
"loss": 0.6762,
"step": 12
},
{
"epoch": 0.07467687888942078,
"grad_norm": 1.3205207586288452,
"learning_rate": 2.4528301886792453e-06,
"loss": 0.7395,
"step": 13
},
{
"epoch": 0.08042125418860699,
"grad_norm": 1.3457564115524292,
"learning_rate": 2.6415094339622644e-06,
"loss": 0.7279,
"step": 14
},
{
"epoch": 0.0861656294877932,
"grad_norm": 1.1626056432724,
"learning_rate": 2.830188679245283e-06,
"loss": 0.7218,
"step": 15
},
{
"epoch": 0.09191000478697942,
"grad_norm": 1.1689456701278687,
"learning_rate": 3.018867924528302e-06,
"loss": 0.7202,
"step": 16
},
{
"epoch": 0.09765438008616563,
"grad_norm": 1.100240707397461,
"learning_rate": 3.207547169811321e-06,
"loss": 0.6957,
"step": 17
},
{
"epoch": 0.10339875538535184,
"grad_norm": 0.9969179630279541,
"learning_rate": 3.3962264150943395e-06,
"loss": 0.695,
"step": 18
},
{
"epoch": 0.10914313068453806,
"grad_norm": 0.8645837306976318,
"learning_rate": 3.5849056603773586e-06,
"loss": 0.6393,
"step": 19
},
{
"epoch": 0.11488750598372427,
"grad_norm": 0.726235568523407,
"learning_rate": 3.7735849056603777e-06,
"loss": 0.6471,
"step": 20
},
{
"epoch": 0.12063188128291048,
"grad_norm": 0.7744720578193665,
"learning_rate": 3.962264150943396e-06,
"loss": 0.6303,
"step": 21
},
{
"epoch": 0.1263762565820967,
"grad_norm": 0.8271118998527527,
"learning_rate": 4.150943396226416e-06,
"loss": 0.6215,
"step": 22
},
{
"epoch": 0.13212063188128292,
"grad_norm": 0.7437446713447571,
"learning_rate": 4.339622641509435e-06,
"loss": 0.5848,
"step": 23
},
{
"epoch": 0.13786500718046912,
"grad_norm": 0.6880975365638733,
"learning_rate": 4.528301886792453e-06,
"loss": 0.6208,
"step": 24
},
{
"epoch": 0.14360938247965535,
"grad_norm": 0.6708999872207642,
"learning_rate": 4.716981132075472e-06,
"loss": 0.6067,
"step": 25
},
{
"epoch": 0.14935375777884155,
"grad_norm": 0.5787381529808044,
"learning_rate": 4.905660377358491e-06,
"loss": 0.5981,
"step": 26
},
{
"epoch": 0.15509813307802778,
"grad_norm": 0.6275501847267151,
"learning_rate": 5.09433962264151e-06,
"loss": 0.6233,
"step": 27
},
{
"epoch": 0.16084250837721398,
"grad_norm": 0.6212226748466492,
"learning_rate": 5.283018867924529e-06,
"loss": 0.5856,
"step": 28
},
{
"epoch": 0.1665868836764002,
"grad_norm": 0.5922460556030273,
"learning_rate": 5.4716981132075475e-06,
"loss": 0.5698,
"step": 29
},
{
"epoch": 0.1723312589755864,
"grad_norm": 0.5896409749984741,
"learning_rate": 5.660377358490566e-06,
"loss": 0.5768,
"step": 30
},
{
"epoch": 0.17807563427477263,
"grad_norm": 0.5254840850830078,
"learning_rate": 5.849056603773585e-06,
"loss": 0.539,
"step": 31
},
{
"epoch": 0.18382000957395883,
"grad_norm": 0.5485825538635254,
"learning_rate": 6.037735849056604e-06,
"loss": 0.5176,
"step": 32
},
{
"epoch": 0.18956438487314506,
"grad_norm": 0.5224868655204773,
"learning_rate": 6.226415094339623e-06,
"loss": 0.5541,
"step": 33
},
{
"epoch": 0.19530876017233126,
"grad_norm": 0.521843433380127,
"learning_rate": 6.415094339622642e-06,
"loss": 0.5707,
"step": 34
},
{
"epoch": 0.20105313547151749,
"grad_norm": 0.5119784474372864,
"learning_rate": 6.60377358490566e-06,
"loss": 0.5567,
"step": 35
},
{
"epoch": 0.20679751077070369,
"grad_norm": 0.5173502564430237,
"learning_rate": 6.792452830188679e-06,
"loss": 0.5481,
"step": 36
},
{
"epoch": 0.2125418860698899,
"grad_norm": 0.41040942072868347,
"learning_rate": 6.981132075471699e-06,
"loss": 0.5361,
"step": 37
},
{
"epoch": 0.2182862613690761,
"grad_norm": 0.4279431104660034,
"learning_rate": 7.169811320754717e-06,
"loss": 0.5454,
"step": 38
},
{
"epoch": 0.22403063666826234,
"grad_norm": 0.4288845956325531,
"learning_rate": 7.358490566037736e-06,
"loss": 0.5459,
"step": 39
},
{
"epoch": 0.22977501196744854,
"grad_norm": 0.4141804277896881,
"learning_rate": 7.5471698113207555e-06,
"loss": 0.5267,
"step": 40
},
{
"epoch": 0.23551938726663477,
"grad_norm": 4.088698387145996,
"learning_rate": 7.735849056603775e-06,
"loss": 0.5617,
"step": 41
},
{
"epoch": 0.24126376256582097,
"grad_norm": 0.40810075402259827,
"learning_rate": 7.924528301886793e-06,
"loss": 0.5066,
"step": 42
},
{
"epoch": 0.2470081378650072,
"grad_norm": 0.4773496687412262,
"learning_rate": 8.113207547169812e-06,
"loss": 0.5375,
"step": 43
},
{
"epoch": 0.2527525131641934,
"grad_norm": 0.3834821581840515,
"learning_rate": 8.301886792452832e-06,
"loss": 0.5502,
"step": 44
},
{
"epoch": 0.2584968884633796,
"grad_norm": 0.38557955622673035,
"learning_rate": 8.49056603773585e-06,
"loss": 0.5338,
"step": 45
},
{
"epoch": 0.26424126376256585,
"grad_norm": 0.3510483503341675,
"learning_rate": 8.67924528301887e-06,
"loss": 0.4924,
"step": 46
},
{
"epoch": 0.26998563906175205,
"grad_norm": 0.3511549234390259,
"learning_rate": 8.867924528301887e-06,
"loss": 0.5186,
"step": 47
},
{
"epoch": 0.27573001436093825,
"grad_norm": 0.41062384843826294,
"learning_rate": 9.056603773584907e-06,
"loss": 0.5021,
"step": 48
},
{
"epoch": 0.28147438966012445,
"grad_norm": 0.3798637092113495,
"learning_rate": 9.245283018867926e-06,
"loss": 0.5338,
"step": 49
},
{
"epoch": 0.2872187649593107,
"grad_norm": 0.35235193371772766,
"learning_rate": 9.433962264150944e-06,
"loss": 0.4922,
"step": 50
},
{
"epoch": 0.2929631402584969,
"grad_norm": 0.3465921878814697,
"learning_rate": 9.622641509433963e-06,
"loss": 0.4825,
"step": 51
},
{
"epoch": 0.2987075155576831,
"grad_norm": 0.36125242710113525,
"learning_rate": 9.811320754716981e-06,
"loss": 0.5178,
"step": 52
},
{
"epoch": 0.3044518908568693,
"grad_norm": 0.3282877504825592,
"learning_rate": 1e-05,
"loss": 0.494,
"step": 53
},
{
"epoch": 0.31019626615605556,
"grad_norm": 0.3224699795246124,
"learning_rate": 9.999887825938495e-06,
"loss": 0.4878,
"step": 54
},
{
"epoch": 0.31594064145524176,
"grad_norm": 0.36641696095466614,
"learning_rate": 9.999551308787183e-06,
"loss": 0.4905,
"step": 55
},
{
"epoch": 0.32168501675442795,
"grad_norm": 0.321207195520401,
"learning_rate": 9.998990463645464e-06,
"loss": 0.4956,
"step": 56
},
{
"epoch": 0.32742939205361415,
"grad_norm": 0.32767999172210693,
"learning_rate": 9.998205315678248e-06,
"loss": 0.5046,
"step": 57
},
{
"epoch": 0.3331737673528004,
"grad_norm": 0.3131692409515381,
"learning_rate": 9.997195900114833e-06,
"loss": 0.4961,
"step": 58
},
{
"epoch": 0.3389181426519866,
"grad_norm": 0.304837167263031,
"learning_rate": 9.995962262247314e-06,
"loss": 0.4712,
"step": 59
},
{
"epoch": 0.3446625179511728,
"grad_norm": 0.3071947395801544,
"learning_rate": 9.994504457428557e-06,
"loss": 0.4851,
"step": 60
},
{
"epoch": 0.350406893250359,
"grad_norm": 0.344100683927536,
"learning_rate": 9.99282255106972e-06,
"loss": 0.4947,
"step": 61
},
{
"epoch": 0.35615126854954526,
"grad_norm": 0.32712873816490173,
"learning_rate": 9.99091661863731e-06,
"loss": 0.502,
"step": 62
},
{
"epoch": 0.36189564384873146,
"grad_norm": 0.2983171045780182,
"learning_rate": 9.988786745649798e-06,
"loss": 0.4892,
"step": 63
},
{
"epoch": 0.36764001914791766,
"grad_norm": 0.45408669114112854,
"learning_rate": 9.986433027673786e-06,
"loss": 0.4738,
"step": 64
},
{
"epoch": 0.37338439444710386,
"grad_norm": 0.34534087777137756,
"learning_rate": 9.983855570319716e-06,
"loss": 0.4915,
"step": 65
},
{
"epoch": 0.3791287697462901,
"grad_norm": 0.32527977228164673,
"learning_rate": 9.981054489237132e-06,
"loss": 0.4849,
"step": 66
},
{
"epoch": 0.3848731450454763,
"grad_norm": 0.2880174219608307,
"learning_rate": 9.978029910109491e-06,
"loss": 0.4474,
"step": 67
},
{
"epoch": 0.3906175203446625,
"grad_norm": 0.3428208529949188,
"learning_rate": 9.974781968648523e-06,
"loss": 0.4731,
"step": 68
},
{
"epoch": 0.3963618956438487,
"grad_norm": 0.33169832825660706,
"learning_rate": 9.971310810588141e-06,
"loss": 0.4951,
"step": 69
},
{
"epoch": 0.40210627094303497,
"grad_norm": 0.3041759729385376,
"learning_rate": 9.967616591677906e-06,
"loss": 0.4278,
"step": 70
},
{
"epoch": 0.40785064624222117,
"grad_norm": 0.3041941225528717,
"learning_rate": 9.963699477676031e-06,
"loss": 0.4755,
"step": 71
},
{
"epoch": 0.41359502154140737,
"grad_norm": 0.30077141523361206,
"learning_rate": 9.959559644341954e-06,
"loss": 0.4572,
"step": 72
},
{
"epoch": 0.41933939684059357,
"grad_norm": 0.36518850922584534,
"learning_rate": 9.95519727742844e-06,
"loss": 0.4752,
"step": 73
},
{
"epoch": 0.4250837721397798,
"grad_norm": 0.2795243263244629,
"learning_rate": 9.950612572673255e-06,
"loss": 0.4759,
"step": 74
},
{
"epoch": 0.430828147438966,
"grad_norm": 0.30531245470046997,
"learning_rate": 9.945805735790383e-06,
"loss": 0.4897,
"step": 75
},
{
"epoch": 0.4365725227381522,
"grad_norm": 0.31547942757606506,
"learning_rate": 9.940776982460787e-06,
"loss": 0.466,
"step": 76
},
{
"epoch": 0.4423168980373384,
"grad_norm": 0.276068776845932,
"learning_rate": 9.935526538322744e-06,
"loss": 0.4882,
"step": 77
},
{
"epoch": 0.4480612733365247,
"grad_norm": 0.34043920040130615,
"learning_rate": 9.930054638961709e-06,
"loss": 0.4685,
"step": 78
},
{
"epoch": 0.4538056486357109,
"grad_norm": 0.2898786664009094,
"learning_rate": 9.924361529899754e-06,
"loss": 0.4756,
"step": 79
},
{
"epoch": 0.4595500239348971,
"grad_norm": 0.270378977060318,
"learning_rate": 9.918447466584545e-06,
"loss": 0.4599,
"step": 80
},
{
"epoch": 0.4652943992340833,
"grad_norm": 0.35345959663391113,
"learning_rate": 9.91231271437788e-06,
"loss": 0.4629,
"step": 81
},
{
"epoch": 0.47103877453326953,
"grad_norm": 0.3456457555294037,
"learning_rate": 9.905957548543794e-06,
"loss": 0.481,
"step": 82
},
{
"epoch": 0.47678314983245573,
"grad_norm": 0.2923058271408081,
"learning_rate": 9.899382254236186e-06,
"loss": 0.4679,
"step": 83
},
{
"epoch": 0.48252752513164193,
"grad_norm": 0.3531160354614258,
"learning_rate": 9.892587126486046e-06,
"loss": 0.4906,
"step": 84
},
{
"epoch": 0.48827190043082813,
"grad_norm": 0.36479318141937256,
"learning_rate": 9.885572470188207e-06,
"loss": 0.4569,
"step": 85
},
{
"epoch": 0.4940162757300144,
"grad_norm": 0.33483925461769104,
"learning_rate": 9.878338600087658e-06,
"loss": 0.4668,
"step": 86
},
{
"epoch": 0.4997606510292006,
"grad_norm": 0.30246540904045105,
"learning_rate": 9.87088584076544e-06,
"loss": 0.4847,
"step": 87
},
{
"epoch": 0.5055050263283868,
"grad_norm": 0.3419553339481354,
"learning_rate": 9.863214526624065e-06,
"loss": 0.4698,
"step": 88
},
{
"epoch": 0.511249401627573,
"grad_norm": 0.30689287185668945,
"learning_rate": 9.85532500187252e-06,
"loss": 0.4646,
"step": 89
},
{
"epoch": 0.5169937769267592,
"grad_norm": 0.36106929183006287,
"learning_rate": 9.847217620510815e-06,
"loss": 0.4746,
"step": 90
},
{
"epoch": 0.5227381522259454,
"grad_norm": 0.29505378007888794,
"learning_rate": 9.83889274631411e-06,
"loss": 0.4571,
"step": 91
},
{
"epoch": 0.5284825275251317,
"grad_norm": 0.29619649052619934,
"learning_rate": 9.830350752816386e-06,
"loss": 0.4578,
"step": 92
},
{
"epoch": 0.5342269028243178,
"grad_norm": 0.3188607692718506,
"learning_rate": 9.821592023293686e-06,
"loss": 0.4508,
"step": 93
},
{
"epoch": 0.5399712781235041,
"grad_norm": 0.2994546592235565,
"learning_rate": 9.81261695074691e-06,
"loss": 0.4774,
"step": 94
},
{
"epoch": 0.5457156534226902,
"grad_norm": 0.27736401557922363,
"learning_rate": 9.803425937884202e-06,
"loss": 0.4627,
"step": 95
},
{
"epoch": 0.5514600287218765,
"grad_norm": 0.313815176486969,
"learning_rate": 9.794019397102852e-06,
"loss": 0.4753,
"step": 96
},
{
"epoch": 0.5572044040210627,
"grad_norm": 0.3198351562023163,
"learning_rate": 9.784397750470818e-06,
"loss": 0.45,
"step": 97
},
{
"epoch": 0.5629487793202489,
"grad_norm": 0.3304007351398468,
"learning_rate": 9.774561429707769e-06,
"loss": 0.4879,
"step": 98
},
{
"epoch": 0.5686931546194351,
"grad_norm": 0.2905180752277374,
"learning_rate": 9.764510876165727e-06,
"loss": 0.448,
"step": 99
},
{
"epoch": 0.5744375299186214,
"grad_norm": 0.31816208362579346,
"learning_rate": 9.754246540809257e-06,
"loss": 0.4483,
"step": 100
},
{
"epoch": 0.5801819052178075,
"grad_norm": 0.3271932005882263,
"learning_rate": 9.743768884195233e-06,
"loss": 0.4772,
"step": 101
},
{
"epoch": 0.5859262805169938,
"grad_norm": 0.3400787115097046,
"learning_rate": 9.733078376452172e-06,
"loss": 0.4762,
"step": 102
},
{
"epoch": 0.59167065581618,
"grad_norm": 0.3152044415473938,
"learning_rate": 9.722175497259145e-06,
"loss": 0.4625,
"step": 103
},
{
"epoch": 0.5974150311153662,
"grad_norm": 0.29500487446784973,
"learning_rate": 9.71106073582425e-06,
"loss": 0.4513,
"step": 104
},
{
"epoch": 0.6031594064145525,
"grad_norm": 0.30806249380111694,
"learning_rate": 9.699734590862655e-06,
"loss": 0.4493,
"step": 105
},
{
"epoch": 0.6089037817137386,
"grad_norm": 0.32135871052742004,
"learning_rate": 9.688197570574238e-06,
"loss": 0.4687,
"step": 106
},
{
"epoch": 0.6146481570129249,
"grad_norm": 0.2976573705673218,
"learning_rate": 9.676450192620767e-06,
"loss": 0.464,
"step": 107
},
{
"epoch": 0.6203925323121111,
"grad_norm": 0.29863908886909485,
"learning_rate": 9.66449298410268e-06,
"loss": 0.4816,
"step": 108
},
{
"epoch": 0.6261369076112973,
"grad_norm": 0.31702297925949097,
"learning_rate": 9.652326481535434e-06,
"loss": 0.4278,
"step": 109
},
{
"epoch": 0.6318812829104835,
"grad_norm": 0.3526081442832947,
"learning_rate": 9.639951230825433e-06,
"loss": 0.4931,
"step": 110
},
{
"epoch": 0.6376256582096697,
"grad_norm": 0.33451569080352783,
"learning_rate": 9.62736778724553e-06,
"loss": 0.475,
"step": 111
},
{
"epoch": 0.6433700335088559,
"grad_norm": 0.33537557721138,
"learning_rate": 9.614576715410116e-06,
"loss": 0.4799,
"step": 112
},
{
"epoch": 0.6491144088080422,
"grad_norm": 0.3408398926258087,
"learning_rate": 9.60157858924978e-06,
"loss": 0.4483,
"step": 113
},
{
"epoch": 0.6548587841072283,
"grad_norm": 0.3780161440372467,
"learning_rate": 9.588373991985566e-06,
"loss": 0.4684,
"step": 114
},
{
"epoch": 0.6606031594064146,
"grad_norm": 0.35523924231529236,
"learning_rate": 9.574963516102795e-06,
"loss": 0.4783,
"step": 115
},
{
"epoch": 0.6663475347056008,
"grad_norm": 0.35043102502822876,
"learning_rate": 9.561347763324484e-06,
"loss": 0.4462,
"step": 116
},
{
"epoch": 0.672091910004787,
"grad_norm": 0.388933390378952,
"learning_rate": 9.547527344584353e-06,
"loss": 0.4617,
"step": 117
},
{
"epoch": 0.6778362853039732,
"grad_norm": 0.32936355471611023,
"learning_rate": 9.533502879999398e-06,
"loss": 0.48,
"step": 118
},
{
"epoch": 0.6835806606031594,
"grad_norm": 0.3353422284126282,
"learning_rate": 9.519274998842084e-06,
"loss": 0.4832,
"step": 119
},
{
"epoch": 0.6893250359023456,
"grad_norm": 0.36188051104545593,
"learning_rate": 9.504844339512096e-06,
"loss": 0.4527,
"step": 120
},
{
"epoch": 0.6950694112015319,
"grad_norm": 0.30944687128067017,
"learning_rate": 9.490211549507701e-06,
"loss": 0.4712,
"step": 121
},
{
"epoch": 0.700813786500718,
"grad_norm": 0.3265434801578522,
"learning_rate": 9.475377285396692e-06,
"loss": 0.4505,
"step": 122
},
{
"epoch": 0.7065581617999043,
"grad_norm": 0.3855360150337219,
"learning_rate": 9.460342212786933e-06,
"loss": 0.4417,
"step": 123
},
{
"epoch": 0.7123025370990905,
"grad_norm": 0.32236477732658386,
"learning_rate": 9.445107006296488e-06,
"loss": 0.4765,
"step": 124
},
{
"epoch": 0.7180469123982767,
"grad_norm": 0.32656142115592957,
"learning_rate": 9.42967234952335e-06,
"loss": 0.4829,
"step": 125
},
{
"epoch": 0.7237912876974629,
"grad_norm": 0.3501734435558319,
"learning_rate": 9.414038935014777e-06,
"loss": 0.4471,
"step": 126
},
{
"epoch": 0.7295356629966491,
"grad_norm": 0.32487544417381287,
"learning_rate": 9.398207464236209e-06,
"loss": 0.4483,
"step": 127
},
{
"epoch": 0.7352800382958353,
"grad_norm": 0.31272241473197937,
"learning_rate": 9.382178647539794e-06,
"loss": 0.4545,
"step": 128
},
{
"epoch": 0.7410244135950216,
"grad_norm": 0.3410162031650543,
"learning_rate": 9.365953204132526e-06,
"loss": 0.4642,
"step": 129
},
{
"epoch": 0.7467687888942077,
"grad_norm": 0.327289342880249,
"learning_rate": 9.349531862043952e-06,
"loss": 0.4794,
"step": 130
},
{
"epoch": 0.752513164193394,
"grad_norm": 0.3137280344963074,
"learning_rate": 9.332915358093532e-06,
"loss": 0.4453,
"step": 131
},
{
"epoch": 0.7582575394925802,
"grad_norm": 0.3067407011985779,
"learning_rate": 9.316104437857561e-06,
"loss": 0.4506,
"step": 132
},
{
"epoch": 0.7640019147917664,
"grad_norm": 0.3284437656402588,
"learning_rate": 9.299099855635716e-06,
"loss": 0.4443,
"step": 133
},
{
"epoch": 0.7697462900909526,
"grad_norm": 0.4092399477958679,
"learning_rate": 9.28190237441722e-06,
"loss": 0.4351,
"step": 134
},
{
"epoch": 0.7754906653901388,
"grad_norm": 0.3092726469039917,
"learning_rate": 9.2645127658466e-06,
"loss": 0.4532,
"step": 135
},
{
"epoch": 0.781235040689325,
"grad_norm": 0.34482482075691223,
"learning_rate": 9.246931810189061e-06,
"loss": 0.4626,
"step": 136
},
{
"epoch": 0.7869794159885113,
"grad_norm": 0.3547237813472748,
"learning_rate": 9.229160296295488e-06,
"loss": 0.4305,
"step": 137
},
{
"epoch": 0.7927237912876974,
"grad_norm": 0.32667168974876404,
"learning_rate": 9.211199021567034e-06,
"loss": 0.4675,
"step": 138
},
{
"epoch": 0.7984681665868837,
"grad_norm": 0.3909010589122772,
"learning_rate": 9.193048791919357e-06,
"loss": 0.4743,
"step": 139
},
{
"epoch": 0.8042125418860699,
"grad_norm": 0.31883516907691956,
"learning_rate": 9.174710421746445e-06,
"loss": 0.4824,
"step": 140
},
{
"epoch": 0.8099569171852561,
"grad_norm": 0.36612772941589355,
"learning_rate": 9.156184733884084e-06,
"loss": 0.4475,
"step": 141
},
{
"epoch": 0.8157012924844423,
"grad_norm": 0.32956618070602417,
"learning_rate": 9.137472559572935e-06,
"loss": 0.4762,
"step": 142
},
{
"epoch": 0.8214456677836285,
"grad_norm": 0.3318566679954529,
"learning_rate": 9.118574738421236e-06,
"loss": 0.4381,
"step": 143
},
{
"epoch": 0.8271900430828147,
"grad_norm": 0.34344008564949036,
"learning_rate": 9.099492118367123e-06,
"loss": 0.4693,
"step": 144
},
{
"epoch": 0.832934418382001,
"grad_norm": 0.2847115695476532,
"learning_rate": 9.080225555640601e-06,
"loss": 0.4552,
"step": 145
},
{
"epoch": 0.8386787936811871,
"grad_norm": 0.31437164545059204,
"learning_rate": 9.0607759147251e-06,
"loss": 0.457,
"step": 146
},
{
"epoch": 0.8444231689803734,
"grad_norm": 0.2915739417076111,
"learning_rate": 9.04114406831871e-06,
"loss": 0.4318,
"step": 147
},
{
"epoch": 0.8501675442795597,
"grad_norm": 0.32622477412223816,
"learning_rate": 9.021330897295011e-06,
"loss": 0.456,
"step": 148
},
{
"epoch": 0.8559119195787458,
"grad_norm": 0.3252268135547638,
"learning_rate": 9.001337290663548e-06,
"loss": 0.4397,
"step": 149
},
{
"epoch": 0.861656294877932,
"grad_norm": 0.3453269898891449,
"learning_rate": 8.981164145529943e-06,
"loss": 0.4612,
"step": 150
},
{
"epoch": 0.8674006701771182,
"grad_norm": 0.28802141547203064,
"learning_rate": 8.960812367055646e-06,
"loss": 0.473,
"step": 151
},
{
"epoch": 0.8731450454763044,
"grad_norm": 0.27404195070266724,
"learning_rate": 8.940282868417321e-06,
"loss": 0.4133,
"step": 152
},
{
"epoch": 0.8788894207754907,
"grad_norm": 0.3235178589820862,
"learning_rate": 8.91957657076586e-06,
"loss": 0.4658,
"step": 153
},
{
"epoch": 0.8846337960746768,
"grad_norm": 0.29341384768486023,
"learning_rate": 8.898694403185066e-06,
"loss": 0.4287,
"step": 154
},
{
"epoch": 0.8903781713738631,
"grad_norm": 0.335021436214447,
"learning_rate": 8.877637302649962e-06,
"loss": 0.4424,
"step": 155
},
{
"epoch": 0.8961225466730494,
"grad_norm": 0.2819330096244812,
"learning_rate": 8.856406213984743e-06,
"loss": 0.4579,
"step": 156
},
{
"epoch": 0.9018669219722355,
"grad_norm": 0.3036133646965027,
"learning_rate": 8.835002089820387e-06,
"loss": 0.4468,
"step": 157
},
{
"epoch": 0.9076112972714218,
"grad_norm": 0.30535823106765747,
"learning_rate": 8.81342589055191e-06,
"loss": 0.4735,
"step": 158
},
{
"epoch": 0.9133556725706079,
"grad_norm": 0.2687799036502838,
"learning_rate": 8.791678584295276e-06,
"loss": 0.4381,
"step": 159
},
{
"epoch": 0.9191000478697942,
"grad_norm": 0.3096264898777008,
"learning_rate": 8.76976114684395e-06,
"loss": 0.4467,
"step": 160
},
{
"epoch": 0.9248444231689804,
"grad_norm": 0.3003120422363281,
"learning_rate": 8.747674561625121e-06,
"loss": 0.4633,
"step": 161
},
{
"epoch": 0.9305887984681666,
"grad_norm": 0.2911892533302307,
"learning_rate": 8.725419819655582e-06,
"loss": 0.4093,
"step": 162
},
{
"epoch": 0.9363331737673528,
"grad_norm": 0.28195834159851074,
"learning_rate": 8.702997919497247e-06,
"loss": 0.4535,
"step": 163
},
{
"epoch": 0.9420775490665391,
"grad_norm": 0.28224724531173706,
"learning_rate": 8.680409867212359e-06,
"loss": 0.4726,
"step": 164
},
{
"epoch": 0.9478219243657252,
"grad_norm": 0.31931445002555847,
"learning_rate": 8.657656676318346e-06,
"loss": 0.467,
"step": 165
},
{
"epoch": 0.9535662996649115,
"grad_norm": 0.3119199872016907,
"learning_rate": 8.634739367742341e-06,
"loss": 0.4248,
"step": 166
},
{
"epoch": 0.9593106749640976,
"grad_norm": 0.2851564586162567,
"learning_rate": 8.611658969775378e-06,
"loss": 0.4421,
"step": 167
},
{
"epoch": 0.9650550502632839,
"grad_norm": 0.287062406539917,
"learning_rate": 8.588416518026248e-06,
"loss": 0.4379,
"step": 168
},
{
"epoch": 0.9707994255624701,
"grad_norm": 0.29976505041122437,
"learning_rate": 8.565013055375035e-06,
"loss": 0.453,
"step": 169
},
{
"epoch": 0.9765438008616563,
"grad_norm": 0.3060532212257385,
"learning_rate": 8.541449631926325e-06,
"loss": 0.4484,
"step": 170
},
{
"epoch": 0.9822881761608425,
"grad_norm": 0.29888424277305603,
"learning_rate": 8.51772730496208e-06,
"loss": 0.4728,
"step": 171
},
{
"epoch": 0.9880325514600288,
"grad_norm": 0.31030842661857605,
"learning_rate": 8.49384713889421e-06,
"loss": 0.4441,
"step": 172
},
{
"epoch": 0.9937769267592149,
"grad_norm": 0.28488606214523315,
"learning_rate": 8.469810205216795e-06,
"loss": 0.4784,
"step": 173
},
{
"epoch": 0.9995213020584012,
"grad_norm": 0.2805924713611603,
"learning_rate": 8.445617582458033e-06,
"loss": 0.4643,
"step": 174
},
{
"epoch": 1.0052656773575874,
"grad_norm": 0.9323565363883972,
"learning_rate": 8.42127035613182e-06,
"loss": 0.7769,
"step": 175
},
{
"epoch": 1.0110100526567736,
"grad_norm": 0.28666186332702637,
"learning_rate": 8.396769618689064e-06,
"loss": 0.4213,
"step": 176
},
{
"epoch": 1.0167544279559597,
"grad_norm": 0.30998125672340393,
"learning_rate": 8.372116469468654e-06,
"loss": 0.4429,
"step": 177
},
{
"epoch": 1.022498803255146,
"grad_norm": 0.37547767162323,
"learning_rate": 8.347312014648144e-06,
"loss": 0.3927,
"step": 178
},
{
"epoch": 1.0282431785543322,
"grad_norm": 0.31034451723098755,
"learning_rate": 8.32235736719411e-06,
"loss": 0.3735,
"step": 179
},
{
"epoch": 1.0339875538535184,
"grad_norm": 0.4133894443511963,
"learning_rate": 8.297253646812213e-06,
"loss": 0.4508,
"step": 180
},
{
"epoch": 1.0397319291527047,
"grad_norm": 0.3431154787540436,
"learning_rate": 8.272001979896962e-06,
"loss": 0.4064,
"step": 181
},
{
"epoch": 1.0454763044518909,
"grad_norm": 0.31550031900405884,
"learning_rate": 8.246603499481177e-06,
"loss": 0.4075,
"step": 182
},
{
"epoch": 1.051220679751077,
"grad_norm": 0.3756781816482544,
"learning_rate": 8.221059345185136e-06,
"loss": 0.4108,
"step": 183
},
{
"epoch": 1.0569650550502634,
"grad_norm": 0.34178560972213745,
"learning_rate": 8.195370663165455e-06,
"loss": 0.4057,
"step": 184
},
{
"epoch": 1.0627094303494495,
"grad_norm": 0.31101906299591064,
"learning_rate": 8.169538606063647e-06,
"loss": 0.4237,
"step": 185
},
{
"epoch": 1.0684538056486357,
"grad_norm": 0.3273501694202423,
"learning_rate": 8.143564332954426e-06,
"loss": 0.4287,
"step": 186
},
{
"epoch": 1.0741981809478218,
"grad_norm": 0.34117910265922546,
"learning_rate": 8.117449009293668e-06,
"loss": 0.4279,
"step": 187
},
{
"epoch": 1.0799425562470082,
"grad_norm": 0.28906843066215515,
"learning_rate": 8.091193806866147e-06,
"loss": 0.4271,
"step": 188
},
{
"epoch": 1.0856869315461943,
"grad_norm": 0.2958456575870514,
"learning_rate": 8.064799903732936e-06,
"loss": 0.3967,
"step": 189
},
{
"epoch": 1.0914313068453805,
"grad_norm": 0.2881177067756653,
"learning_rate": 8.038268484178566e-06,
"loss": 0.4176,
"step": 190
},
{
"epoch": 1.0971756821445668,
"grad_norm": 0.32246285676956177,
"learning_rate": 8.011600738657865e-06,
"loss": 0.4222,
"step": 191
},
{
"epoch": 1.102920057443753,
"grad_norm": 0.3277246952056885,
"learning_rate": 7.98479786374257e-06,
"loss": 0.3853,
"step": 192
},
{
"epoch": 1.1086644327429391,
"grad_norm": 0.3409973382949829,
"learning_rate": 7.957861062067614e-06,
"loss": 0.4156,
"step": 193
},
{
"epoch": 1.1144088080421255,
"grad_norm": 0.2889862060546875,
"learning_rate": 7.930791542277175e-06,
"loss": 0.4143,
"step": 194
},
{
"epoch": 1.1201531833413116,
"grad_norm": 0.3289618492126465,
"learning_rate": 7.903590518970445e-06,
"loss": 0.4363,
"step": 195
},
{
"epoch": 1.1258975586404978,
"grad_norm": 0.2657482624053955,
"learning_rate": 7.876259212647129e-06,
"loss": 0.3729,
"step": 196
},
{
"epoch": 1.1316419339396842,
"grad_norm": 0.31660833954811096,
"learning_rate": 7.848798849652684e-06,
"loss": 0.4256,
"step": 197
},
{
"epoch": 1.1373863092388703,
"grad_norm": 0.2567324936389923,
"learning_rate": 7.821210662123284e-06,
"loss": 0.3998,
"step": 198
},
{
"epoch": 1.1431306845380564,
"grad_norm": 0.41787901520729065,
"learning_rate": 7.793495887930551e-06,
"loss": 0.3784,
"step": 199
},
{
"epoch": 1.1488750598372426,
"grad_norm": 0.2855132222175598,
"learning_rate": 7.765655770625997e-06,
"loss": 0.3947,
"step": 200
},
{
"epoch": 1.154619435136429,
"grad_norm": 0.3121788799762726,
"learning_rate": 7.737691559385237e-06,
"loss": 0.4236,
"step": 201
},
{
"epoch": 1.160363810435615,
"grad_norm": 0.3103139102458954,
"learning_rate": 7.709604508951927e-06,
"loss": 0.4296,
"step": 202
},
{
"epoch": 1.1661081857348012,
"grad_norm": 0.29311925172805786,
"learning_rate": 7.68139587958148e-06,
"loss": 0.3973,
"step": 203
},
{
"epoch": 1.1718525610339876,
"grad_norm": 0.2649705111980438,
"learning_rate": 7.653066936984504e-06,
"loss": 0.4285,
"step": 204
},
{
"epoch": 1.1775969363331737,
"grad_norm": 0.3288572430610657,
"learning_rate": 7.6246189522700205e-06,
"loss": 0.4415,
"step": 205
},
{
"epoch": 1.18334131163236,
"grad_norm": 0.3235880732536316,
"learning_rate": 7.596053201888425e-06,
"loss": 0.3963,
"step": 206
},
{
"epoch": 1.1890856869315463,
"grad_norm": 0.26849544048309326,
"learning_rate": 7.56737096757421e-06,
"loss": 0.4009,
"step": 207
},
{
"epoch": 1.1948300622307324,
"grad_norm": 0.34631481766700745,
"learning_rate": 7.538573536288466e-06,
"loss": 0.4067,
"step": 208
},
{
"epoch": 1.2005744375299185,
"grad_norm": 0.2917556166648865,
"learning_rate": 7.509662200161122e-06,
"loss": 0.3852,
"step": 209
},
{
"epoch": 1.206318812829105,
"grad_norm": 0.33460524678230286,
"learning_rate": 7.480638256432977e-06,
"loss": 0.4346,
"step": 210
},
{
"epoch": 1.212063188128291,
"grad_norm": 0.31476372480392456,
"learning_rate": 7.4515030073974915e-06,
"loss": 0.4224,
"step": 211
},
{
"epoch": 1.2178075634274772,
"grad_norm": 0.2829968333244324,
"learning_rate": 7.422257760342351e-06,
"loss": 0.416,
"step": 212
},
{
"epoch": 1.2235519387266636,
"grad_norm": 0.2671918272972107,
"learning_rate": 7.392903827490814e-06,
"loss": 0.3893,
"step": 213
},
{
"epoch": 1.2292963140258497,
"grad_norm": 0.3298075497150421,
"learning_rate": 7.363442525942827e-06,
"loss": 0.3805,
"step": 214
},
{
"epoch": 1.2350406893250359,
"grad_norm": 0.26619213819503784,
"learning_rate": 7.333875177615931e-06,
"loss": 0.4179,
"step": 215
},
{
"epoch": 1.2407850646242222,
"grad_norm": 0.26025766134262085,
"learning_rate": 7.304203109185947e-06,
"loss": 0.425,
"step": 216
},
{
"epoch": 1.2465294399234084,
"grad_norm": 0.2681398093700409,
"learning_rate": 7.274427652027444e-06,
"loss": 0.3768,
"step": 217
},
{
"epoch": 1.2522738152225945,
"grad_norm": 0.30282825231552124,
"learning_rate": 7.244550142154009e-06,
"loss": 0.4041,
"step": 218
},
{
"epoch": 1.2580181905217809,
"grad_norm": 0.24776627123355865,
"learning_rate": 7.214571920158293e-06,
"loss": 0.4106,
"step": 219
},
{
"epoch": 1.263762565820967,
"grad_norm": 0.25247007608413696,
"learning_rate": 7.1844943311518665e-06,
"loss": 0.444,
"step": 220
},
{
"epoch": 1.2695069411201532,
"grad_norm": 0.27782031893730164,
"learning_rate": 7.1543187247048525e-06,
"loss": 0.43,
"step": 221
},
{
"epoch": 1.2752513164193395,
"grad_norm": 0.2578721046447754,
"learning_rate": 7.124046454785387e-06,
"loss": 0.3989,
"step": 222
},
{
"epoch": 1.2809956917185257,
"grad_norm": 0.30569854378700256,
"learning_rate": 7.093678879698858e-06,
"loss": 0.4405,
"step": 223
},
{
"epoch": 1.2867400670177118,
"grad_norm": 0.2797735333442688,
"learning_rate": 7.063217362026957e-06,
"loss": 0.3981,
"step": 224
},
{
"epoch": 1.292484442316898,
"grad_norm": 0.2695583403110504,
"learning_rate": 7.032663268566547e-06,
"loss": 0.3935,
"step": 225
},
{
"epoch": 1.2982288176160843,
"grad_norm": 0.294919490814209,
"learning_rate": 7.002017970268336e-06,
"loss": 0.3952,
"step": 226
},
{
"epoch": 1.3039731929152705,
"grad_norm": 0.2755994200706482,
"learning_rate": 6.97128284217535e-06,
"loss": 0.4327,
"step": 227
},
{
"epoch": 1.3097175682144566,
"grad_norm": 0.2905407249927521,
"learning_rate": 6.9404592633612486e-06,
"loss": 0.4658,
"step": 228
},
{
"epoch": 1.3154619435136428,
"grad_norm": 0.2658722400665283,
"learning_rate": 6.909548616868444e-06,
"loss": 0.4237,
"step": 229
},
{
"epoch": 1.3212063188128291,
"grad_norm": 0.2581554651260376,
"learning_rate": 6.878552289646041e-06,
"loss": 0.41,
"step": 230
},
{
"epoch": 1.3269506941120153,
"grad_norm": 0.2688734233379364,
"learning_rate": 6.847471672487607e-06,
"loss": 0.3662,
"step": 231
},
{
"epoch": 1.3326950694112014,
"grad_norm": 0.30700597167015076,
"learning_rate": 6.816308159968761e-06,
"loss": 0.4595,
"step": 232
},
{
"epoch": 1.3384394447103878,
"grad_norm": 0.271698921918869,
"learning_rate": 6.7850631503846165e-06,
"loss": 0.4073,
"step": 233
},
{
"epoch": 1.344183820009574,
"grad_norm": 0.24926996231079102,
"learning_rate": 6.753738045687021e-06,
"loss": 0.42,
"step": 234
},
{
"epoch": 1.34992819530876,
"grad_norm": 0.2579510807991028,
"learning_rate": 6.722334251421665e-06,
"loss": 0.3975,
"step": 235
},
{
"epoch": 1.3556725706079464,
"grad_norm": 0.2766447067260742,
"learning_rate": 6.690853176665007e-06,
"loss": 0.4539,
"step": 236
},
{
"epoch": 1.3614169459071326,
"grad_norm": 0.2743930220603943,
"learning_rate": 6.659296233961055e-06,
"loss": 0.3857,
"step": 237
},
{
"epoch": 1.3671613212063187,
"grad_norm": 0.2682178318500519,
"learning_rate": 6.627664839257979e-06,
"loss": 0.4127,
"step": 238
},
{
"epoch": 1.372905696505505,
"grad_norm": 0.26600515842437744,
"learning_rate": 6.595960411844589e-06,
"loss": 0.4309,
"step": 239
},
{
"epoch": 1.3786500718046912,
"grad_norm": 0.2679958939552307,
"learning_rate": 6.564184374286636e-06,
"loss": 0.4276,
"step": 240
},
{
"epoch": 1.3843944471038774,
"grad_norm": 0.32891181111335754,
"learning_rate": 6.532338152363001e-06,
"loss": 0.3908,
"step": 241
},
{
"epoch": 1.3901388224030637,
"grad_norm": 0.2673075199127197,
"learning_rate": 6.500423175001705e-06,
"loss": 0.3662,
"step": 242
},
{
"epoch": 1.39588319770225,
"grad_norm": 0.29596373438835144,
"learning_rate": 6.468440874215801e-06,
"loss": 0.4268,
"step": 243
},
{
"epoch": 1.401627573001436,
"grad_norm": 0.2759062349796295,
"learning_rate": 6.43639268503912e-06,
"loss": 0.4216,
"step": 244
},
{
"epoch": 1.4073719483006224,
"grad_norm": 0.2963887155056,
"learning_rate": 6.40428004546188e-06,
"loss": 0.4303,
"step": 245
},
{
"epoch": 1.4131163235998085,
"grad_norm": 0.26301926374435425,
"learning_rate": 6.372104396366162e-06,
"loss": 0.3999,
"step": 246
},
{
"epoch": 1.4188606988989947,
"grad_norm": 0.26165592670440674,
"learning_rate": 6.339867181461265e-06,
"loss": 0.4315,
"step": 247
},
{
"epoch": 1.424605074198181,
"grad_norm": 0.2649396061897278,
"learning_rate": 6.307569847218917e-06,
"loss": 0.3932,
"step": 248
},
{
"epoch": 1.4303494494973672,
"grad_norm": 0.26852184534072876,
"learning_rate": 6.275213842808383e-06,
"loss": 0.4079,
"step": 249
},
{
"epoch": 1.4360938247965533,
"grad_norm": 0.2582874894142151,
"learning_rate": 6.242800620031434e-06,
"loss": 0.4011,
"step": 250
},
{
"epoch": 1.4418382000957397,
"grad_norm": 0.27596113085746765,
"learning_rate": 6.2103316332572095e-06,
"loss": 0.4273,
"step": 251
},
{
"epoch": 1.4475825753949259,
"grad_norm": 0.2603662610054016,
"learning_rate": 6.177808339356954e-06,
"loss": 0.4457,
"step": 252
},
{
"epoch": 1.453326950694112,
"grad_norm": 0.2617853283882141,
"learning_rate": 6.14523219763866e-06,
"loss": 0.3843,
"step": 253
},
{
"epoch": 1.4590713259932984,
"grad_norm": 0.2927669882774353,
"learning_rate": 6.112604669781572e-06,
"loss": 0.409,
"step": 254
},
{
"epoch": 1.4648157012924845,
"grad_norm": 0.2545565962791443,
"learning_rate": 6.079927219770623e-06,
"loss": 0.4197,
"step": 255
},
{
"epoch": 1.4705600765916707,
"grad_norm": 0.25056391954421997,
"learning_rate": 6.047201313830724e-06,
"loss": 0.412,
"step": 256
},
{
"epoch": 1.4763044518908568,
"grad_norm": 0.26942598819732666,
"learning_rate": 6.014428420360987e-06,
"loss": 0.4377,
"step": 257
},
{
"epoch": 1.4820488271900432,
"grad_norm": 0.2583950459957123,
"learning_rate": 5.9816100098688456e-06,
"loss": 0.3939,
"step": 258
},
{
"epoch": 1.4877932024892293,
"grad_norm": 0.286765992641449,
"learning_rate": 5.948747554904054e-06,
"loss": 0.4441,
"step": 259
},
{
"epoch": 1.4935375777884154,
"grad_norm": 0.2546658515930176,
"learning_rate": 5.915842529992632e-06,
"loss": 0.4084,
"step": 260
},
{
"epoch": 1.4992819530876016,
"grad_norm": 0.24491065740585327,
"learning_rate": 5.8828964115706925e-06,
"loss": 0.3642,
"step": 261
},
{
"epoch": 1.505026328386788,
"grad_norm": 0.2614350914955139,
"learning_rate": 5.849910677918205e-06,
"loss": 0.385,
"step": 262
},
{
"epoch": 1.510770703685974,
"grad_norm": 0.263822466135025,
"learning_rate": 5.816886809092651e-06,
"loss": 0.3977,
"step": 263
},
{
"epoch": 1.5165150789851602,
"grad_norm": 0.2711222469806671,
"learning_rate": 5.783826286862631e-06,
"loss": 0.4336,
"step": 264
},
{
"epoch": 1.5222594542843466,
"grad_norm": 0.23413866758346558,
"learning_rate": 5.750730594641367e-06,
"loss": 0.391,
"step": 265
},
{
"epoch": 1.5280038295835328,
"grad_norm": 0.26713666319847107,
"learning_rate": 5.717601217420143e-06,
"loss": 0.3995,
"step": 266
},
{
"epoch": 1.533748204882719,
"grad_norm": 0.2639370262622833,
"learning_rate": 5.68443964170168e-06,
"loss": 0.4074,
"step": 267
},
{
"epoch": 1.5394925801819053,
"grad_norm": 0.2865756154060364,
"learning_rate": 5.6512473554334294e-06,
"loss": 0.4146,
"step": 268
},
{
"epoch": 1.5452369554810914,
"grad_norm": 0.256339430809021,
"learning_rate": 5.618025847940817e-06,
"loss": 0.4143,
"step": 269
},
{
"epoch": 1.5509813307802776,
"grad_norm": 0.24432285130023956,
"learning_rate": 5.584776609860414e-06,
"loss": 0.3997,
"step": 270
},
{
"epoch": 1.556725706079464,
"grad_norm": 0.2671893835067749,
"learning_rate": 5.551501133073048e-06,
"loss": 0.4625,
"step": 271
},
{
"epoch": 1.56247008137865,
"grad_norm": 0.2614923417568207,
"learning_rate": 5.518200910636875e-06,
"loss": 0.3983,
"step": 272
},
{
"epoch": 1.5682144566778362,
"grad_norm": 0.2648710608482361,
"learning_rate": 5.4848774367203715e-06,
"loss": 0.4264,
"step": 273
},
{
"epoch": 1.5739588319770226,
"grad_norm": 0.25889015197753906,
"learning_rate": 5.451532206535306e-06,
"loss": 0.4327,
"step": 274
},
{
"epoch": 1.5797032072762087,
"grad_norm": 0.28398048877716064,
"learning_rate": 5.418166716269636e-06,
"loss": 0.4216,
"step": 275
},
{
"epoch": 1.5854475825753949,
"grad_norm": 0.2506558299064636,
"learning_rate": 5.384782463020385e-06,
"loss": 0.4183,
"step": 276
},
{
"epoch": 1.5911919578745812,
"grad_norm": 0.2716221511363983,
"learning_rate": 5.351380944726465e-06,
"loss": 0.4131,
"step": 277
},
{
"epoch": 1.5969363331737674,
"grad_norm": 0.2257940024137497,
"learning_rate": 5.317963660101464e-06,
"loss": 0.3597,
"step": 278
},
{
"epoch": 1.6026807084729535,
"grad_norm": 0.28616511821746826,
"learning_rate": 5.284532108566396e-06,
"loss": 0.4147,
"step": 279
},
{
"epoch": 1.6084250837721399,
"grad_norm": 0.26668059825897217,
"learning_rate": 5.251087790182428e-06,
"loss": 0.4133,
"step": 280
},
{
"epoch": 1.614169459071326,
"grad_norm": 0.28087204694747925,
"learning_rate": 5.217632205583574e-06,
"loss": 0.4326,
"step": 281
},
{
"epoch": 1.6199138343705122,
"grad_norm": 0.2532707452774048,
"learning_rate": 5.184166855909355e-06,
"loss": 0.4132,
"step": 282
},
{
"epoch": 1.6256582096696985,
"grad_norm": 0.2916858196258545,
"learning_rate": 5.150693242737444e-06,
"loss": 0.4304,
"step": 283
},
{
"epoch": 1.6314025849688847,
"grad_norm": 0.2610633969306946,
"learning_rate": 5.117212868016303e-06,
"loss": 0.4486,
"step": 284
},
{
"epoch": 1.6371469602680708,
"grad_norm": 0.27046632766723633,
"learning_rate": 5.083727233997775e-06,
"loss": 0.3898,
"step": 285
},
{
"epoch": 1.6428913355672572,
"grad_norm": 0.2789868116378784,
"learning_rate": 5.05023784316969e-06,
"loss": 0.4064,
"step": 286
},
{
"epoch": 1.6486357108664431,
"grad_norm": 0.31009382009506226,
"learning_rate": 5.016746198188439e-06,
"loss": 0.44,
"step": 287
},
{
"epoch": 1.6543800861656295,
"grad_norm": 0.24213601648807526,
"learning_rate": 4.983253801811562e-06,
"loss": 0.38,
"step": 288
},
{
"epoch": 1.6601244614648158,
"grad_norm": 0.2574230432510376,
"learning_rate": 4.949762156830312e-06,
"loss": 0.4279,
"step": 289
},
{
"epoch": 1.6658688367640018,
"grad_norm": 0.3645157814025879,
"learning_rate": 4.916272766002227e-06,
"loss": 0.4149,
"step": 290
},
{
"epoch": 1.6716132120631881,
"grad_norm": 0.25819137692451477,
"learning_rate": 4.882787131983698e-06,
"loss": 0.3851,
"step": 291
},
{
"epoch": 1.6773575873623745,
"grad_norm": 0.2552003264427185,
"learning_rate": 4.849306757262558e-06,
"loss": 0.3952,
"step": 292
},
{
"epoch": 1.6831019626615604,
"grad_norm": 0.31992608308792114,
"learning_rate": 4.8158331440906466e-06,
"loss": 0.4331,
"step": 293
},
{
"epoch": 1.6888463379607468,
"grad_norm": 0.276024729013443,
"learning_rate": 4.7823677944164285e-06,
"loss": 0.4551,
"step": 294
},
{
"epoch": 1.694590713259933,
"grad_norm": 0.2532188892364502,
"learning_rate": 4.748912209817572e-06,
"loss": 0.4066,
"step": 295
},
{
"epoch": 1.700335088559119,
"grad_norm": 0.24477799236774445,
"learning_rate": 4.715467891433607e-06,
"loss": 0.3797,
"step": 296
},
{
"epoch": 1.7060794638583054,
"grad_norm": 0.2809993624687195,
"learning_rate": 4.682036339898537e-06,
"loss": 0.4005,
"step": 297
},
{
"epoch": 1.7118238391574916,
"grad_norm": 0.2791358530521393,
"learning_rate": 4.6486190552735375e-06,
"loss": 0.4353,
"step": 298
},
{
"epoch": 1.7175682144566777,
"grad_norm": 0.2509957253932953,
"learning_rate": 4.615217536979616e-06,
"loss": 0.4061,
"step": 299
},
{
"epoch": 1.723312589755864,
"grad_norm": 0.24833880364894867,
"learning_rate": 4.581833283730367e-06,
"loss": 0.3808,
"step": 300
},
{
"epoch": 1.7290569650550502,
"grad_norm": 0.30278247594833374,
"learning_rate": 4.548467793464696e-06,
"loss": 0.4447,
"step": 301
},
{
"epoch": 1.7348013403542364,
"grad_norm": 0.2316392958164215,
"learning_rate": 4.515122563279631e-06,
"loss": 0.3859,
"step": 302
},
{
"epoch": 1.7405457156534228,
"grad_norm": 0.25202158093452454,
"learning_rate": 4.481799089363127e-06,
"loss": 0.3879,
"step": 303
},
{
"epoch": 1.746290090952609,
"grad_norm": 0.2888227701187134,
"learning_rate": 4.448498866926952e-06,
"loss": 0.4187,
"step": 304
},
{
"epoch": 1.752034466251795,
"grad_norm": 0.2678024470806122,
"learning_rate": 4.415223390139588e-06,
"loss": 0.4348,
"step": 305
},
{
"epoch": 1.7577788415509814,
"grad_norm": 0.25489702820777893,
"learning_rate": 4.381974152059184e-06,
"loss": 0.4216,
"step": 306
},
{
"epoch": 1.7635232168501676,
"grad_norm": 0.2304321825504303,
"learning_rate": 4.348752644566573e-06,
"loss": 0.3878,
"step": 307
},
{
"epoch": 1.7692675921493537,
"grad_norm": 0.2631133198738098,
"learning_rate": 4.315560358298321e-06,
"loss": 0.4249,
"step": 308
},
{
"epoch": 1.77501196744854,
"grad_norm": 0.25828343629837036,
"learning_rate": 4.2823987825798575e-06,
"loss": 0.4005,
"step": 309
},
{
"epoch": 1.7807563427477262,
"grad_norm": 0.26033782958984375,
"learning_rate": 4.249269405358634e-06,
"loss": 0.3837,
"step": 310
},
{
"epoch": 1.7865007180469124,
"grad_norm": 0.26372194290161133,
"learning_rate": 4.2161737131373695e-06,
"loss": 0.4337,
"step": 311
},
{
"epoch": 1.7922450933460987,
"grad_norm": 0.24711427092552185,
"learning_rate": 4.183113190907349e-06,
"loss": 0.4053,
"step": 312
},
{
"epoch": 1.7979894686452849,
"grad_norm": 0.2691591680049896,
"learning_rate": 4.150089322081797e-06,
"loss": 0.3914,
"step": 313
},
{
"epoch": 1.803733843944471,
"grad_norm": 0.24242590367794037,
"learning_rate": 4.1171035884293075e-06,
"loss": 0.3837,
"step": 314
},
{
"epoch": 1.8094782192436574,
"grad_norm": 0.24888436496257782,
"learning_rate": 4.084157470007371e-06,
"loss": 0.411,
"step": 315
},
{
"epoch": 1.8152225945428435,
"grad_norm": 0.26371899247169495,
"learning_rate": 4.051252445095946e-06,
"loss": 0.4466,
"step": 316
},
{
"epoch": 1.8209669698420297,
"grad_norm": 0.2363133430480957,
"learning_rate": 4.018389990131156e-06,
"loss": 0.4095,
"step": 317
},
{
"epoch": 1.826711345141216,
"grad_norm": 0.23559501767158508,
"learning_rate": 3.985571579639013e-06,
"loss": 0.3997,
"step": 318
},
{
"epoch": 1.832455720440402,
"grad_norm": 0.23539544641971588,
"learning_rate": 3.952798686169279e-06,
"loss": 0.3972,
"step": 319
},
{
"epoch": 1.8382000957395883,
"grad_norm": 0.2795656621456146,
"learning_rate": 3.920072780229378e-06,
"loss": 0.3976,
"step": 320
},
{
"epoch": 1.8439444710387747,
"grad_norm": 0.2547081410884857,
"learning_rate": 3.887395330218429e-06,
"loss": 0.4259,
"step": 321
},
{
"epoch": 1.8496888463379606,
"grad_norm": 0.22198687493801117,
"learning_rate": 3.854767802361342e-06,
"loss": 0.3728,
"step": 322
},
{
"epoch": 1.855433221637147,
"grad_norm": 0.2664637267589569,
"learning_rate": 3.822191660643047e-06,
"loss": 0.4256,
"step": 323
},
{
"epoch": 1.8611775969363333,
"grad_norm": 0.22866730391979218,
"learning_rate": 3.789668366742792e-06,
"loss": 0.4283,
"step": 324
},
{
"epoch": 1.8669219722355193,
"grad_norm": 0.24711646139621735,
"learning_rate": 3.7571993799685675e-06,
"loss": 0.4124,
"step": 325
},
{
"epoch": 1.8726663475347056,
"grad_norm": 0.2538555860519409,
"learning_rate": 3.7247861571916183e-06,
"loss": 0.3934,
"step": 326
},
{
"epoch": 1.8784107228338918,
"grad_norm": 0.2567046582698822,
"learning_rate": 3.6924301527810856e-06,
"loss": 0.411,
"step": 327
},
{
"epoch": 1.884155098133078,
"grad_norm": 0.2725870907306671,
"learning_rate": 3.6601328185387364e-06,
"loss": 0.4195,
"step": 328
},
{
"epoch": 1.8898994734322643,
"grad_norm": 0.24136009812355042,
"learning_rate": 3.6278956036338397e-06,
"loss": 0.3904,
"step": 329
},
{
"epoch": 1.8956438487314504,
"grad_norm": 0.2512601315975189,
"learning_rate": 3.5957199545381216e-06,
"loss": 0.3869,
"step": 330
},
{
"epoch": 1.9013882240306366,
"grad_norm": 0.24334044754505157,
"learning_rate": 3.5636073149608824e-06,
"loss": 0.4019,
"step": 331
},
{
"epoch": 1.907132599329823,
"grad_norm": 0.24103941023349762,
"learning_rate": 3.5315591257842e-06,
"loss": 0.3609,
"step": 332
},
{
"epoch": 1.912876974629009,
"grad_norm": 0.24972982704639435,
"learning_rate": 3.4995768249982975e-06,
"loss": 0.3843,
"step": 333
},
{
"epoch": 1.9186213499281952,
"grad_norm": 0.24723373353481293,
"learning_rate": 3.467661847637001e-06,
"loss": 0.4227,
"step": 334
},
{
"epoch": 1.9243657252273816,
"grad_norm": 0.24389074742794037,
"learning_rate": 3.4358156257133644e-06,
"loss": 0.4296,
"step": 335
},
{
"epoch": 1.9301101005265677,
"grad_norm": 0.2541641294956207,
"learning_rate": 3.404039588155413e-06,
"loss": 0.3727,
"step": 336
},
{
"epoch": 1.9358544758257539,
"grad_norm": 0.22656212747097015,
"learning_rate": 3.372335160742022e-06,
"loss": 0.3666,
"step": 337
},
{
"epoch": 1.9415988511249402,
"grad_norm": 0.24361655116081238,
"learning_rate": 3.3407037660389474e-06,
"loss": 0.388,
"step": 338
},
{
"epoch": 1.9473432264241264,
"grad_norm": 0.24967317283153534,
"learning_rate": 3.3091468233349934e-06,
"loss": 0.4433,
"step": 339
},
{
"epoch": 1.9530876017233125,
"grad_norm": 0.23722046613693237,
"learning_rate": 3.2776657485783357e-06,
"loss": 0.4124,
"step": 340
},
{
"epoch": 1.958831977022499,
"grad_norm": 0.24369174242019653,
"learning_rate": 3.246261954312979e-06,
"loss": 0.4216,
"step": 341
},
{
"epoch": 1.964576352321685,
"grad_norm": 0.23776546120643616,
"learning_rate": 3.2149368496153856e-06,
"loss": 0.4256,
"step": 342
},
{
"epoch": 1.9703207276208712,
"grad_norm": 0.25640493631362915,
"learning_rate": 3.1836918400312387e-06,
"loss": 0.3812,
"step": 343
},
{
"epoch": 1.9760651029200575,
"grad_norm": 0.24129056930541992,
"learning_rate": 3.152528327512395e-06,
"loss": 0.4002,
"step": 344
},
{
"epoch": 1.9818094782192437,
"grad_norm": 0.24882204830646515,
"learning_rate": 3.1214477103539585e-06,
"loss": 0.414,
"step": 345
},
{
"epoch": 1.9875538535184298,
"grad_norm": 0.24418820440769196,
"learning_rate": 3.0904513831315563e-06,
"loss": 0.4261,
"step": 346
},
{
"epoch": 1.9932982288176162,
"grad_norm": 0.2543267011642456,
"learning_rate": 3.059540736638751e-06,
"loss": 0.4272,
"step": 347
},
{
"epoch": 1.9990426041168023,
"grad_norm": 0.23077604174613953,
"learning_rate": 3.028717157824652e-06,
"loss": 0.3639,
"step": 348
},
{
"epoch": 2.0047869794159885,
"grad_norm": 0.9153398871421814,
"learning_rate": 2.9979820297316652e-06,
"loss": 0.6275,
"step": 349
},
{
"epoch": 2.010531354715175,
"grad_norm": 0.31731805205345154,
"learning_rate": 2.9673367314334533e-06,
"loss": 0.4267,
"step": 350
},
{
"epoch": 2.016275730014361,
"grad_norm": 0.2978307008743286,
"learning_rate": 2.936782637973044e-06,
"loss": 0.3788,
"step": 351
},
{
"epoch": 2.022020105313547,
"grad_norm": 0.28210750222206116,
"learning_rate": 2.9063211203011443e-06,
"loss": 0.383,
"step": 352
},
{
"epoch": 2.0277644806127335,
"grad_norm": 0.28862592577934265,
"learning_rate": 2.8759535452146128e-06,
"loss": 0.3827,
"step": 353
},
{
"epoch": 2.0335088559119194,
"grad_norm": 0.3073420524597168,
"learning_rate": 2.8456812752951483e-06,
"loss": 0.3858,
"step": 354
},
{
"epoch": 2.039253231211106,
"grad_norm": 0.27528196573257446,
"learning_rate": 2.815505668848136e-06,
"loss": 0.3536,
"step": 355
},
{
"epoch": 2.044997606510292,
"grad_norm": 0.25948259234428406,
"learning_rate": 2.785428079841709e-06,
"loss": 0.3844,
"step": 356
},
{
"epoch": 2.050741981809478,
"grad_norm": 0.3084884285926819,
"learning_rate": 2.755449857845992e-06,
"loss": 0.3772,
"step": 357
},
{
"epoch": 2.0564863571086645,
"grad_norm": 0.2928623855113983,
"learning_rate": 2.725572347972558e-06,
"loss": 0.3562,
"step": 358
},
{
"epoch": 2.062230732407851,
"grad_norm": 0.27207672595977783,
"learning_rate": 2.6957968908140546e-06,
"loss": 0.362,
"step": 359
},
{
"epoch": 2.0679751077070367,
"grad_norm": 0.2691848576068878,
"learning_rate": 2.666124822384071e-06,
"loss": 0.3807,
"step": 360
},
{
"epoch": 2.073719483006223,
"grad_norm": 0.22607873380184174,
"learning_rate": 2.636557474057173e-06,
"loss": 0.3436,
"step": 361
},
{
"epoch": 2.0794638583054095,
"grad_norm": 0.25091448426246643,
"learning_rate": 2.607096172509187e-06,
"loss": 0.374,
"step": 362
},
{
"epoch": 2.0852082336045954,
"grad_norm": 0.306155800819397,
"learning_rate": 2.5777422396576503e-06,
"loss": 0.4164,
"step": 363
},
{
"epoch": 2.0909526089037818,
"grad_norm": 0.25360941886901855,
"learning_rate": 2.5484969926025114e-06,
"loss": 0.3706,
"step": 364
},
{
"epoch": 2.096696984202968,
"grad_norm": 0.2415456920862198,
"learning_rate": 2.5193617435670244e-06,
"loss": 0.4131,
"step": 365
},
{
"epoch": 2.102441359502154,
"grad_norm": 0.23606958985328674,
"learning_rate": 2.4903377998388783e-06,
"loss": 0.3781,
"step": 366
},
{
"epoch": 2.1081857348013404,
"grad_norm": 0.2365504652261734,
"learning_rate": 2.461426463711535e-06,
"loss": 0.3769,
"step": 367
},
{
"epoch": 2.113930110100527,
"grad_norm": 0.23365849256515503,
"learning_rate": 2.4326290324257896e-06,
"loss": 0.3573,
"step": 368
},
{
"epoch": 2.1196744853997127,
"grad_norm": 0.24357256293296814,
"learning_rate": 2.403946798111576e-06,
"loss": 0.3505,
"step": 369
},
{
"epoch": 2.125418860698899,
"grad_norm": 0.28438547253608704,
"learning_rate": 2.37538104772998e-06,
"loss": 0.4116,
"step": 370
},
{
"epoch": 2.131163235998085,
"grad_norm": 0.21200957894325256,
"learning_rate": 2.3469330630154974e-06,
"loss": 0.326,
"step": 371
},
{
"epoch": 2.1369076112972714,
"grad_norm": 0.2492566853761673,
"learning_rate": 2.318604120418521e-06,
"loss": 0.3999,
"step": 372
},
{
"epoch": 2.1426519865964577,
"grad_norm": 0.22796769440174103,
"learning_rate": 2.2903954910480746e-06,
"loss": 0.391,
"step": 373
},
{
"epoch": 2.1483963618956436,
"grad_norm": 0.23675256967544556,
"learning_rate": 2.2623084406147643e-06,
"loss": 0.362,
"step": 374
},
{
"epoch": 2.15414073719483,
"grad_norm": 0.22599707543849945,
"learning_rate": 2.234344229374003e-06,
"loss": 0.3744,
"step": 375
},
{
"epoch": 2.1598851124940164,
"grad_norm": 0.23200853168964386,
"learning_rate": 2.2065041120694487e-06,
"loss": 0.3748,
"step": 376
},
{
"epoch": 2.1656294877932023,
"grad_norm": 0.22807936370372772,
"learning_rate": 2.178789337876716e-06,
"loss": 0.3762,
"step": 377
},
{
"epoch": 2.1713738630923887,
"grad_norm": 0.21814514696598053,
"learning_rate": 2.151201150347318e-06,
"loss": 0.3669,
"step": 378
},
{
"epoch": 2.177118238391575,
"grad_norm": 0.22707633674144745,
"learning_rate": 2.123740787352872e-06,
"loss": 0.341,
"step": 379
},
{
"epoch": 2.182862613690761,
"grad_norm": 0.2529568672180176,
"learning_rate": 2.096409481029556e-06,
"loss": 0.3723,
"step": 380
},
{
"epoch": 2.1886069889899473,
"grad_norm": 0.231676384806633,
"learning_rate": 2.069208457722828e-06,
"loss": 0.3581,
"step": 381
},
{
"epoch": 2.1943513642891337,
"grad_norm": 0.22723394632339478,
"learning_rate": 2.042138937932388e-06,
"loss": 0.3621,
"step": 382
},
{
"epoch": 2.2000957395883196,
"grad_norm": 0.24381379783153534,
"learning_rate": 2.015202136257432e-06,
"loss": 0.411,
"step": 383
},
{
"epoch": 2.205840114887506,
"grad_norm": 0.22075577080249786,
"learning_rate": 1.988399261342135e-06,
"loss": 0.3448,
"step": 384
},
{
"epoch": 2.2115844901866923,
"grad_norm": 0.2534730136394501,
"learning_rate": 1.9617315158214363e-06,
"loss": 0.3949,
"step": 385
},
{
"epoch": 2.2173288654858783,
"grad_norm": 0.2261105328798294,
"learning_rate": 1.935200096267064e-06,
"loss": 0.3752,
"step": 386
},
{
"epoch": 2.2230732407850646,
"grad_norm": 0.22049082815647125,
"learning_rate": 1.908806193133855e-06,
"loss": 0.3693,
"step": 387
},
{
"epoch": 2.228817616084251,
"grad_norm": 0.21172842383384705,
"learning_rate": 1.8825509907063328e-06,
"loss": 0.395,
"step": 388
},
{
"epoch": 2.234561991383437,
"grad_norm": 0.24431262910366058,
"learning_rate": 1.856435667045577e-06,
"loss": 0.3871,
"step": 389
},
{
"epoch": 2.2403063666826233,
"grad_norm": 0.24871356785297394,
"learning_rate": 1.8304613939363531e-06,
"loss": 0.3852,
"step": 390
},
{
"epoch": 2.2460507419818097,
"grad_norm": 0.21080052852630615,
"learning_rate": 1.8046293368345485e-06,
"loss": 0.3687,
"step": 391
},
{
"epoch": 2.2517951172809956,
"grad_norm": 0.1993173360824585,
"learning_rate": 1.7789406548148647e-06,
"loss": 0.3554,
"step": 392
},
{
"epoch": 2.257539492580182,
"grad_norm": 0.23289141058921814,
"learning_rate": 1.7533965005188242e-06,
"loss": 0.4427,
"step": 393
},
{
"epoch": 2.2632838678793683,
"grad_norm": 0.20976853370666504,
"learning_rate": 1.7279980201030382e-06,
"loss": 0.3516,
"step": 394
},
{
"epoch": 2.2690282431785542,
"grad_norm": 0.23207247257232666,
"learning_rate": 1.7027463531877897e-06,
"loss": 0.3759,
"step": 395
},
{
"epoch": 2.2747726184777406,
"grad_norm": 0.22939598560333252,
"learning_rate": 1.677642632805892e-06,
"loss": 0.3825,
"step": 396
},
{
"epoch": 2.280516993776927,
"grad_norm": 0.22694642841815948,
"learning_rate": 1.6526879853518558e-06,
"loss": 0.3804,
"step": 397
},
{
"epoch": 2.286261369076113,
"grad_norm": 0.2267027348279953,
"learning_rate": 1.6278835305313462e-06,
"loss": 0.3835,
"step": 398
},
{
"epoch": 2.2920057443752992,
"grad_norm": 0.23273469507694244,
"learning_rate": 1.6032303813109368e-06,
"loss": 0.3769,
"step": 399
},
{
"epoch": 2.297750119674485,
"grad_norm": 0.22379297018051147,
"learning_rate": 1.578729643868181e-06,
"loss": 0.3861,
"step": 400
},
{
"epoch": 2.3034944949736715,
"grad_norm": 0.2582657039165497,
"learning_rate": 1.5543824175419691e-06,
"loss": 0.3953,
"step": 401
},
{
"epoch": 2.309238870272858,
"grad_norm": 0.20334425568580627,
"learning_rate": 1.5301897947832063e-06,
"loss": 0.3766,
"step": 402
},
{
"epoch": 2.3149832455720443,
"grad_norm": 0.20961356163024902,
"learning_rate": 1.5061528611057917e-06,
"loss": 0.3515,
"step": 403
},
{
"epoch": 2.32072762087123,
"grad_norm": 0.226267471909523,
"learning_rate": 1.4822726950379207e-06,
"loss": 0.4064,
"step": 404
},
{
"epoch": 2.3264719961704166,
"grad_norm": 0.2132333666086197,
"learning_rate": 1.4585503680736756e-06,
"loss": 0.3639,
"step": 405
},
{
"epoch": 2.3322163714696025,
"grad_norm": 0.22161203622817993,
"learning_rate": 1.4349869446249664e-06,
"loss": 0.3643,
"step": 406
},
{
"epoch": 2.337960746768789,
"grad_norm": 0.21189740300178528,
"learning_rate": 1.4115834819737534e-06,
"loss": 0.3316,
"step": 407
},
{
"epoch": 2.343705122067975,
"grad_norm": 0.2345123291015625,
"learning_rate": 1.3883410302246237e-06,
"loss": 0.3701,
"step": 408
},
{
"epoch": 2.349449497367161,
"grad_norm": 0.23276999592781067,
"learning_rate": 1.3652606322576606e-06,
"loss": 0.3434,
"step": 409
},
{
"epoch": 2.3551938726663475,
"grad_norm": 0.21485473215579987,
"learning_rate": 1.3423433236816563e-06,
"loss": 0.3821,
"step": 410
},
{
"epoch": 2.360938247965534,
"grad_norm": 0.21681098639965057,
"learning_rate": 1.3195901327876426e-06,
"loss": 0.3717,
"step": 411
},
{
"epoch": 2.36668262326472,
"grad_norm": 0.2142193466424942,
"learning_rate": 1.2970020805027555e-06,
"loss": 0.3555,
"step": 412
},
{
"epoch": 2.372426998563906,
"grad_norm": 0.2359546720981598,
"learning_rate": 1.2745801803444192e-06,
"loss": 0.3989,
"step": 413
},
{
"epoch": 2.3781713738630925,
"grad_norm": 0.25387322902679443,
"learning_rate": 1.25232543837488e-06,
"loss": 0.3943,
"step": 414
},
{
"epoch": 2.3839157491622784,
"grad_norm": 0.21573728322982788,
"learning_rate": 1.2302388531560515e-06,
"loss": 0.3693,
"step": 415
},
{
"epoch": 2.389660124461465,
"grad_norm": 0.2112240195274353,
"learning_rate": 1.2083214157047257e-06,
"loss": 0.3896,
"step": 416
},
{
"epoch": 2.395404499760651,
"grad_norm": 0.21627095341682434,
"learning_rate": 1.186574109448091e-06,
"loss": 0.3827,
"step": 417
},
{
"epoch": 2.401148875059837,
"grad_norm": 0.20708608627319336,
"learning_rate": 1.164997910179615e-06,
"loss": 0.3978,
"step": 418
},
{
"epoch": 2.4068932503590235,
"grad_norm": 0.21296700835227966,
"learning_rate": 1.1435937860152579e-06,
"loss": 0.384,
"step": 419
},
{
"epoch": 2.41263762565821,
"grad_norm": 0.21074937283992767,
"learning_rate": 1.1223626973500395e-06,
"loss": 0.3468,
"step": 420
},
{
"epoch": 2.4183820009573957,
"grad_norm": 0.22150783240795135,
"learning_rate": 1.1013055968149343e-06,
"loss": 0.3817,
"step": 421
},
{
"epoch": 2.424126376256582,
"grad_norm": 0.21905986964702606,
"learning_rate": 1.0804234292341426e-06,
"loss": 0.3984,
"step": 422
},
{
"epoch": 2.4298707515557685,
"grad_norm": 0.20688596367835999,
"learning_rate": 1.0597171315826805e-06,
"loss": 0.3382,
"step": 423
},
{
"epoch": 2.4356151268549544,
"grad_norm": 0.21128499507904053,
"learning_rate": 1.0391876329443534e-06,
"loss": 0.3582,
"step": 424
},
{
"epoch": 2.4413595021541408,
"grad_norm": 0.23447024822235107,
"learning_rate": 1.0188358544700583e-06,
"loss": 0.3776,
"step": 425
},
{
"epoch": 2.447103877453327,
"grad_norm": 0.21497882902622223,
"learning_rate": 9.986627093364542e-07,
"loss": 0.4066,
"step": 426
},
{
"epoch": 2.452848252752513,
"grad_norm": 0.20009803771972656,
"learning_rate": 9.786691027049893e-07,
"loss": 0.3365,
"step": 427
},
{
"epoch": 2.4585926280516994,
"grad_norm": 0.24452891945838928,
"learning_rate": 9.588559316812906e-07,
"loss": 0.4195,
"step": 428
},
{
"epoch": 2.4643370033508853,
"grad_norm": 0.20632074773311615,
"learning_rate": 9.392240852749007e-07,
"loss": 0.3858,
"step": 429
},
{
"epoch": 2.4700813786500717,
"grad_norm": 0.2089974731206894,
"learning_rate": 9.197744443594003e-07,
"loss": 0.3525,
"step": 430
},
{
"epoch": 2.475825753949258,
"grad_norm": 0.20910853147506714,
"learning_rate": 9.005078816328772e-07,
"loss": 0.4122,
"step": 431
},
{
"epoch": 2.4815701292484444,
"grad_norm": 0.2021329402923584,
"learning_rate": 8.814252615787661e-07,
"loss": 0.3664,
"step": 432
},
{
"epoch": 2.4873145045476304,
"grad_norm": 0.22205480933189392,
"learning_rate": 8.625274404270662e-07,
"loss": 0.4036,
"step": 433
},
{
"epoch": 2.4930588798468167,
"grad_norm": 0.21877549588680267,
"learning_rate": 8.438152661159165e-07,
"loss": 0.3557,
"step": 434
},
{
"epoch": 2.4988032551460027,
"grad_norm": 0.2282644808292389,
"learning_rate": 8.252895782535569e-07,
"loss": 0.3974,
"step": 435
},
{
"epoch": 2.504547630445189,
"grad_norm": 0.19086650013923645,
"learning_rate": 8.069512080806441e-07,
"loss": 0.3348,
"step": 436
},
{
"epoch": 2.5102920057443754,
"grad_norm": 0.22070568799972534,
"learning_rate": 7.88800978432967e-07,
"loss": 0.3948,
"step": 437
},
{
"epoch": 2.5160363810435618,
"grad_norm": 0.2190258651971817,
"learning_rate": 7.708397037045129e-07,
"loss": 0.4058,
"step": 438
},
{
"epoch": 2.5217807563427477,
"grad_norm": 0.19553068280220032,
"learning_rate": 7.530681898109393e-07,
"loss": 0.3539,
"step": 439
},
{
"epoch": 2.527525131641934,
"grad_norm": 0.22662606835365295,
"learning_rate": 7.35487234153402e-07,
"loss": 0.4109,
"step": 440
},
{
"epoch": 2.53326950694112,
"grad_norm": 0.229027658700943,
"learning_rate": 7.180976255827809e-07,
"loss": 0.4039,
"step": 441
},
{
"epoch": 2.5390138822403063,
"grad_norm": 0.19389323890209198,
"learning_rate": 7.009001443642843e-07,
"loss": 0.3364,
"step": 442
},
{
"epoch": 2.5447582575394927,
"grad_norm": 0.18867188692092896,
"learning_rate": 6.838955621424404e-07,
"loss": 0.3302,
"step": 443
},
{
"epoch": 2.550502632838679,
"grad_norm": 0.22040928900241852,
"learning_rate": 6.67084641906468e-07,
"loss": 0.4042,
"step": 444
},
{
"epoch": 2.556247008137865,
"grad_norm": 0.20983703434467316,
"learning_rate": 6.50468137956049e-07,
"loss": 0.3732,
"step": 445
},
{
"epoch": 2.5619913834370514,
"grad_norm": 0.1974366009235382,
"learning_rate": 6.340467958674762e-07,
"loss": 0.3608,
"step": 446
},
{
"epoch": 2.5677357587362373,
"grad_norm": 0.23682722449302673,
"learning_rate": 6.178213524602061e-07,
"loss": 0.396,
"step": 447
},
{
"epoch": 2.5734801340354236,
"grad_norm": 0.21820193529129028,
"learning_rate": 6.017925357637932e-07,
"loss": 0.3567,
"step": 448
},
{
"epoch": 2.57922450933461,
"grad_norm": 0.22981062531471252,
"learning_rate": 5.859610649852249e-07,
"loss": 0.3642,
"step": 449
},
{
"epoch": 2.584968884633796,
"grad_norm": 0.2145363688468933,
"learning_rate": 5.703276504766514e-07,
"loss": 0.3495,
"step": 450
},
{
"epoch": 2.5907132599329823,
"grad_norm": 0.19343282282352448,
"learning_rate": 5.548929937035147e-07,
"loss": 0.3339,
"step": 451
},
{
"epoch": 2.5964576352321687,
"grad_norm": 0.20028981566429138,
"learning_rate": 5.396577872130676e-07,
"loss": 0.3781,
"step": 452
},
{
"epoch": 2.6022020105313546,
"grad_norm": 0.20916354656219482,
"learning_rate": 5.246227146033089e-07,
"loss": 0.398,
"step": 453
},
{
"epoch": 2.607946385830541,
"grad_norm": 0.19891418516635895,
"learning_rate": 5.097884504922996e-07,
"loss": 0.3522,
"step": 454
},
{
"epoch": 2.6136907611297273,
"grad_norm": 0.22149665653705597,
"learning_rate": 4.951556604879049e-07,
"loss": 0.4384,
"step": 455
},
{
"epoch": 2.6194351364289132,
"grad_norm": 0.20566165447235107,
"learning_rate": 4.807250011579168e-07,
"loss": 0.3747,
"step": 456
},
{
"epoch": 2.6251795117280996,
"grad_norm": 0.20062044262886047,
"learning_rate": 4.6649712000060297e-07,
"loss": 0.3277,
"step": 457
},
{
"epoch": 2.6309238870272855,
"grad_norm": 0.20748507976531982,
"learning_rate": 4.5247265541564836e-07,
"loss": 0.3733,
"step": 458
},
{
"epoch": 2.636668262326472,
"grad_norm": 0.20769307017326355,
"learning_rate": 4.386522366755169e-07,
"loss": 0.3715,
"step": 459
},
{
"epoch": 2.6424126376256583,
"grad_norm": 0.21871528029441833,
"learning_rate": 4.250364838972065e-07,
"loss": 0.4001,
"step": 460
},
{
"epoch": 2.6481570129248446,
"grad_norm": 0.19270025193691254,
"learning_rate": 4.116260080144352e-07,
"loss": 0.3847,
"step": 461
},
{
"epoch": 2.6539013882240305,
"grad_norm": 0.22346258163452148,
"learning_rate": 3.98421410750221e-07,
"loss": 0.3865,
"step": 462
},
{
"epoch": 2.659645763523217,
"grad_norm": 0.2189069241285324,
"learning_rate": 3.854232845898859e-07,
"loss": 0.3777,
"step": 463
},
{
"epoch": 2.665390138822403,
"grad_norm": 0.19334660470485687,
"learning_rate": 3.7263221275447125e-07,
"loss": 0.3738,
"step": 464
},
{
"epoch": 2.671134514121589,
"grad_norm": 0.1997268944978714,
"learning_rate": 3.60048769174568e-07,
"loss": 0.3713,
"step": 465
},
{
"epoch": 2.6768788894207756,
"grad_norm": 0.21596620976924896,
"learning_rate": 3.4767351846456744e-07,
"loss": 0.3963,
"step": 466
},
{
"epoch": 2.682623264719962,
"grad_norm": 0.2167111337184906,
"learning_rate": 3.355070158973212e-07,
"loss": 0.3719,
"step": 467
},
{
"epoch": 2.688367640019148,
"grad_norm": 0.22667430341243744,
"learning_rate": 3.235498073792342e-07,
"loss": 0.3777,
"step": 468
},
{
"epoch": 2.694112015318334,
"grad_norm": 0.22425274550914764,
"learning_rate": 3.118024294257621e-07,
"loss": 0.3934,
"step": 469
},
{
"epoch": 2.69985639061752,
"grad_norm": 0.21314410865306854,
"learning_rate": 3.002654091373453e-07,
"loss": 0.3514,
"step": 470
},
{
"epoch": 2.7056007659167065,
"grad_norm": 0.20535998046398163,
"learning_rate": 2.889392641757527e-07,
"loss": 0.4027,
"step": 471
},
{
"epoch": 2.711345141215893,
"grad_norm": 0.19453689455986023,
"learning_rate": 2.778245027408566e-07,
"loss": 0.3619,
"step": 472
},
{
"epoch": 2.7170895165150792,
"grad_norm": 0.2257540374994278,
"learning_rate": 2.669216235478295e-07,
"loss": 0.4111,
"step": 473
},
{
"epoch": 2.722833891814265,
"grad_norm": 0.22148896753787994,
"learning_rate": 2.562311158047692e-07,
"loss": 0.3853,
"step": 474
},
{
"epoch": 2.7285782671134515,
"grad_norm": 0.23967334628105164,
"learning_rate": 2.45753459190744e-07,
"loss": 0.3789,
"step": 475
},
{
"epoch": 2.7343226424126374,
"grad_norm": 0.21828240156173706,
"learning_rate": 2.354891238342738e-07,
"loss": 0.3635,
"step": 476
},
{
"epoch": 2.740067017711824,
"grad_norm": 0.22273699939250946,
"learning_rate": 2.254385702922318e-07,
"loss": 0.3866,
"step": 477
},
{
"epoch": 2.74581139301101,
"grad_norm": 0.21624407172203064,
"learning_rate": 2.1560224952918373e-07,
"loss": 0.3334,
"step": 478
},
{
"epoch": 2.751555768310196,
"grad_norm": 0.20374171435832977,
"learning_rate": 2.0598060289714893e-07,
"loss": 0.333,
"step": 479
},
{
"epoch": 2.7573001436093825,
"grad_norm": 0.21629025042057037,
"learning_rate": 1.9657406211579966e-07,
"loss": 0.4038,
"step": 480
},
{
"epoch": 2.763044518908569,
"grad_norm": 0.19769959151744843,
"learning_rate": 1.8738304925308926e-07,
"loss": 0.3912,
"step": 481
},
{
"epoch": 2.7687888942077548,
"grad_norm": 0.20424911379814148,
"learning_rate": 1.7840797670631572e-07,
"loss": 0.3857,
"step": 482
},
{
"epoch": 2.774533269506941,
"grad_norm": 0.22209753096103668,
"learning_rate": 1.6964924718361364e-07,
"loss": 0.377,
"step": 483
},
{
"epoch": 2.7802776448061275,
"grad_norm": 0.18916495144367218,
"learning_rate": 1.6110725368589041e-07,
"loss": 0.3567,
"step": 484
},
{
"epoch": 2.7860220201053134,
"grad_norm": 0.21668356657028198,
"learning_rate": 1.5278237948918585e-07,
"loss": 0.4076,
"step": 485
},
{
"epoch": 2.7917663954045,
"grad_norm": 0.20810151100158691,
"learning_rate": 1.4467499812748143e-07,
"loss": 0.3784,
"step": 486
},
{
"epoch": 2.7975107707036857,
"grad_norm": 0.2041124403476715,
"learning_rate": 1.3678547337593494e-07,
"loss": 0.3887,
"step": 487
},
{
"epoch": 2.803255146002872,
"grad_norm": 0.24317069351673126,
"learning_rate": 1.2911415923456017e-07,
"loss": 0.3808,
"step": 488
},
{
"epoch": 2.8089995213020584,
"grad_norm": 0.19764302670955658,
"learning_rate": 1.2166139991234227e-07,
"loss": 0.3556,
"step": 489
},
{
"epoch": 2.814743896601245,
"grad_norm": 0.22858253121376038,
"learning_rate": 1.1442752981179527e-07,
"loss": 0.3944,
"step": 490
},
{
"epoch": 2.8204882719004307,
"grad_norm": 0.20770110189914703,
"learning_rate": 1.0741287351395402e-07,
"loss": 0.4109,
"step": 491
},
{
"epoch": 2.826232647199617,
"grad_norm": 0.20658861100673676,
"learning_rate": 1.0061774576381411e-07,
"loss": 0.375,
"step": 492
},
{
"epoch": 2.831977022498803,
"grad_norm": 0.21842309832572937,
"learning_rate": 9.404245145620717e-08,
"loss": 0.4091,
"step": 493
},
{
"epoch": 2.8377213977979894,
"grad_norm": 0.20370720326900482,
"learning_rate": 8.768728562211948e-08,
"loss": 0.3252,
"step": 494
},
{
"epoch": 2.8434657730971757,
"grad_norm": 0.20084549486637115,
"learning_rate": 8.155253341545655e-08,
"loss": 0.4111,
"step": 495
},
{
"epoch": 2.849210148396362,
"grad_norm": 0.23278358578681946,
"learning_rate": 7.563847010024716e-08,
"loss": 0.3762,
"step": 496
},
{
"epoch": 2.854954523695548,
"grad_norm": 0.20328493416309357,
"learning_rate": 6.994536103829164e-08,
"loss": 0.3765,
"step": 497
},
{
"epoch": 2.8606988989947344,
"grad_norm": 0.20402538776397705,
"learning_rate": 6.447346167725688e-08,
"loss": 0.3449,
"step": 498
},
{
"epoch": 2.8664432742939203,
"grad_norm": 0.22473885118961334,
"learning_rate": 5.9223017539213335e-08,
"loss": 0.4115,
"step": 499
},
{
"epoch": 2.8721876495931067,
"grad_norm": 0.18734395503997803,
"learning_rate": 5.4194264209617705e-08,
"loss": 0.3474,
"step": 500
},
{
"epoch": 2.877932024892293,
"grad_norm": 0.2044266164302826,
"learning_rate": 4.9387427326745287e-08,
"loss": 0.3939,
"step": 501
},
{
"epoch": 2.8836764001914794,
"grad_norm": 0.21485216915607452,
"learning_rate": 4.4802722571561374e-08,
"loss": 0.4016,
"step": 502
},
{
"epoch": 2.8894207754906653,
"grad_norm": 0.22615912556648254,
"learning_rate": 4.044035565804793e-08,
"loss": 0.3599,
"step": 503
},
{
"epoch": 2.8951651507898517,
"grad_norm": 0.2036847621202469,
"learning_rate": 3.6300522323969855e-08,
"loss": 0.3266,
"step": 504
},
{
"epoch": 2.9009095260890376,
"grad_norm": 0.2017732411623001,
"learning_rate": 3.2383408322095856e-08,
"loss": 0.4101,
"step": 505
},
{
"epoch": 2.906653901388224,
"grad_norm": 0.19980326294898987,
"learning_rate": 2.8689189411859607e-08,
"loss": 0.3482,
"step": 506
},
{
"epoch": 2.9123982766874104,
"grad_norm": 0.221945121884346,
"learning_rate": 2.5218031351478268e-08,
"loss": 0.4437,
"step": 507
},
{
"epoch": 2.9181426519865967,
"grad_norm": 0.2403242588043213,
"learning_rate": 2.1970089890509527e-08,
"loss": 0.3528,
"step": 508
},
{
"epoch": 2.9238870272857826,
"grad_norm": 0.22222495079040527,
"learning_rate": 1.8945510762868325e-08,
"loss": 0.3765,
"step": 509
},
{
"epoch": 2.929631402584969,
"grad_norm": 0.20567429065704346,
"learning_rate": 1.614442968028429e-08,
"loss": 0.3844,
"step": 510
},
{
"epoch": 2.935375777884155,
"grad_norm": 0.1993645876646042,
"learning_rate": 1.3566972326214956e-08,
"loss": 0.3712,
"step": 511
},
{
"epoch": 2.9411201531833413,
"grad_norm": 0.21140974760055542,
"learning_rate": 1.1213254350202486e-08,
"loss": 0.357,
"step": 512
},
{
"epoch": 2.9468645284825277,
"grad_norm": 0.20113182067871094,
"learning_rate": 9.083381362690603e-09,
"loss": 0.3692,
"step": 513
},
{
"epoch": 2.9526089037817136,
"grad_norm": 0.2018483579158783,
"learning_rate": 7.177448930279496e-09,
"loss": 0.3817,
"step": 514
},
{
"epoch": 2.9583532790809,
"grad_norm": 0.21870052814483643,
"learning_rate": 5.495542571443135e-09,
"loss": 0.3505,
"step": 515
},
{
"epoch": 2.9640976543800863,
"grad_norm": 0.2133331298828125,
"learning_rate": 4.037737752686788e-09,
"loss": 0.3767,
"step": 516
},
{
"epoch": 2.9698420296792722,
"grad_norm": 0.19916494190692902,
"learning_rate": 2.8040998851674996e-09,
"loss": 0.374,
"step": 517
},
{
"epoch": 2.9755864049784586,
"grad_norm": 0.2075948715209961,
"learning_rate": 1.7946843217514498e-09,
"loss": 0.3572,
"step": 518
},
{
"epoch": 2.981330780277645,
"grad_norm": 0.20564815402030945,
"learning_rate": 1.009536354537044e-09,
"loss": 0.3296,
"step": 519
},
{
"epoch": 2.987075155576831,
"grad_norm": 0.22070202231407166,
"learning_rate": 4.486912128182086e-10,
"loss": 0.4276,
"step": 520
},
{
"epoch": 2.9928195308760173,
"grad_norm": 0.19630590081214905,
"learning_rate": 1.1217406150676457e-10,
"loss": 0.3737,
"step": 521
},
{
"epoch": 2.998563906175203,
"grad_norm": 0.20596085488796234,
"learning_rate": 0.0,
"loss": 0.3589,
"step": 522
},
{
"epoch": 2.998563906175203,
"step": 522,
"total_flos": 1806937112969216.0,
"train_loss": 0.43391252791516166,
"train_runtime": 96882.8659,
"train_samples_per_second": 0.517,
"train_steps_per_second": 0.005
}
],
"logging_steps": 1,
"max_steps": 522,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1806937112969216.0,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}