{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.998563906175203,
  "eval_steps": 500,
  "global_step": 522,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0057443752991862135, "grad_norm": 5.37349271774292, "learning_rate": 1.886792452830189e-07, "loss": 0.8587, "step": 1 },
    { "epoch": 0.011488750598372427, "grad_norm": 5.775772571563721, "learning_rate": 3.773584905660378e-07, "loss": 0.8937, "step": 2 },
    { "epoch": 0.01723312589755864, "grad_norm": 5.701127529144287, "learning_rate": 5.660377358490567e-07, "loss": 0.9179, "step": 3 },
    { "epoch": 0.022977501196744854, "grad_norm": 5.48037576675415, "learning_rate": 7.547169811320755e-07, "loss": 0.8829, "step": 4 },
    { "epoch": 0.028721876495931067, "grad_norm": 5.497180938720703, "learning_rate": 9.433962264150944e-07, "loss": 0.8801, "step": 5 },
    { "epoch": 0.03446625179511728, "grad_norm": 5.722774028778076, "learning_rate": 1.1320754716981133e-06, "loss": 0.8665, "step": 6 },
    { "epoch": 0.040210627094303494, "grad_norm": 5.297784328460693, "learning_rate": 1.3207547169811322e-06, "loss": 0.8801, "step": 7 },
    { "epoch": 0.04595500239348971, "grad_norm": 4.2470808029174805, "learning_rate": 1.509433962264151e-06, "loss": 0.8368, "step": 8 },
    { "epoch": 0.05169937769267592, "grad_norm": 3.891289234161377, "learning_rate": 1.6981132075471698e-06, "loss": 0.8067, "step": 9 },
    { "epoch": 0.057443752991862135, "grad_norm": 3.872174024581909, "learning_rate": 1.8867924528301889e-06, "loss": 0.8609, "step": 10 },
    { "epoch": 0.06318812829104835, "grad_norm": 2.5940165519714355, "learning_rate": 2.075471698113208e-06, "loss": 0.8291, "step": 11 },
    { "epoch": 0.06893250359023456, "grad_norm": 2.293347120285034, "learning_rate": 2.2641509433962266e-06, "loss": 0.7539, "step": 12 },
    { "epoch": 0.07467687888942078, "grad_norm": 2.191865921020508, "learning_rate": 2.4528301886792453e-06, "loss": 0.8272, "step": 13 },
    { "epoch": 0.08042125418860699, "grad_norm": 2.2178380489349365, "learning_rate": 2.6415094339622644e-06, "loss": 0.8099, "step": 14 },
    { "epoch": 0.0861656294877932, "grad_norm": 3.483651638031006, "learning_rate": 2.830188679245283e-06, "loss": 0.806, "step": 15 },
    { "epoch": 0.09191000478697942, "grad_norm": 3.8942596912384033, "learning_rate": 3.018867924528302e-06, "loss": 0.8085, "step": 16 },
    { "epoch": 0.09765438008616563, "grad_norm": 3.8473079204559326, "learning_rate": 3.207547169811321e-06, "loss": 0.7816, "step": 17 },
    { "epoch": 0.10339875538535184, "grad_norm": 3.5139431953430176, "learning_rate": 3.3962264150943395e-06, "loss": 0.7845, "step": 18 },
    { "epoch": 0.10914313068453806, "grad_norm": 2.965437412261963, "learning_rate": 3.5849056603773586e-06, "loss": 0.7167, "step": 19 },
    { "epoch": 0.11488750598372427, "grad_norm": 2.0699801445007324, "learning_rate": 3.7735849056603777e-06, "loss": 0.7372, "step": 20 },
    { "epoch": 0.12063188128291048, "grad_norm": 1.5903819799423218, "learning_rate": 3.962264150943396e-06, "loss": 0.711, "step": 21 },
    { "epoch": 0.1263762565820967, "grad_norm": 1.460195779800415, "learning_rate": 4.150943396226416e-06, "loss": 0.7038, "step": 22 },
    { "epoch": 0.13212063188128292, "grad_norm": 1.229818344116211, "learning_rate": 4.339622641509435e-06, "loss": 0.661, "step": 23 },
    { "epoch": 0.13786500718046912, "grad_norm": 1.1705495119094849, "learning_rate": 4.528301886792453e-06, "loss": 0.6991, "step": 24 },
    { "epoch": 0.14360938247965535, "grad_norm": 1.2829539775848389, "learning_rate": 4.716981132075472e-06, "loss": 0.6844, "step": 25 },
    { "epoch": 0.14935375777884155, "grad_norm": 1.1270300149917603, "learning_rate": 4.905660377358491e-06, "loss": 0.6736, "step": 26 },
    { "epoch": 0.15509813307802778, "grad_norm": 1.018497109413147, "learning_rate": 5.09433962264151e-06, "loss": 0.7077, "step": 27 },
    { "epoch": 0.16084250837721398, "grad_norm": 0.8882578611373901, "learning_rate": 5.283018867924529e-06, "loss": 0.6665, "step": 28 },
    { "epoch": 0.1665868836764002, "grad_norm": 1.0214155912399292, "learning_rate": 5.4716981132075475e-06, "loss": 0.6512, "step": 29 },
    { "epoch": 0.1723312589755864, "grad_norm": 0.9866452813148499, "learning_rate": 5.660377358490566e-06, "loss": 0.6561, "step": 30 },
    { "epoch": 0.17807563427477263, "grad_norm": 0.822456955909729, "learning_rate": 5.849056603773585e-06, "loss": 0.6137, "step": 31 },
    { "epoch": 0.18382000957395883, "grad_norm": 0.7711695432662964, "learning_rate": 6.037735849056604e-06, "loss": 0.5908, "step": 32 },
    { "epoch": 0.18956438487314506, "grad_norm": 0.9906867146492004, "learning_rate": 6.226415094339623e-06, "loss": 0.6336, "step": 33 },
    { "epoch": 0.19530876017233126, "grad_norm": 0.9756529331207275, "learning_rate": 6.415094339622642e-06, "loss": 0.6528, "step": 34 },
    { "epoch": 0.20105313547151749, "grad_norm": 0.7538486123085022, "learning_rate": 6.60377358490566e-06, "loss": 0.6377, "step": 35 },
    { "epoch": 0.20679751077070369, "grad_norm": 0.7044028639793396, "learning_rate": 6.792452830188679e-06, "loss": 0.6312, "step": 36 },
    { "epoch": 0.2125418860698899, "grad_norm": 0.766700029373169, "learning_rate": 6.981132075471699e-06, "loss": 0.6142, "step": 37 },
    { "epoch": 0.2182862613690761, "grad_norm": 0.8737772107124329, "learning_rate": 7.169811320754717e-06, "loss": 0.6278, "step": 38 },
    { "epoch": 0.22403063666826234, "grad_norm": 0.7406350374221802, "learning_rate": 7.358490566037736e-06, "loss": 0.6274, "step": 39 },
    { "epoch": 0.22977501196744854, "grad_norm": 0.6247995495796204, "learning_rate": 7.5471698113207555e-06, "loss": 0.604, "step": 40 },
    { "epoch": 0.23551938726663477, "grad_norm": 0.7143349647521973, "learning_rate": 7.735849056603775e-06, "loss": 0.6461, "step": 41 },
    { "epoch": 0.24126376256582097, "grad_norm": 0.6662243604660034, "learning_rate": 7.924528301886793e-06, "loss": 0.5815, "step": 42 },
    { "epoch": 0.2470081378650072, "grad_norm": 0.7262428402900696, "learning_rate": 8.113207547169812e-06, "loss": 0.6194, "step": 43 },
    { "epoch": 0.2527525131641934, "grad_norm": 0.6320751905441284, "learning_rate": 8.301886792452832e-06, "loss": 0.6338, "step": 44 },
    { "epoch": 0.2584968884633796, "grad_norm": 0.5772255063056946, "learning_rate": 8.49056603773585e-06, "loss": 0.6176, "step": 45 },
    { "epoch": 0.26424126376256585, "grad_norm": 0.7007628679275513, "learning_rate": 8.67924528301887e-06, "loss": 0.5683, "step": 46 },
    { "epoch": 0.26998563906175205, "grad_norm": 0.7302536368370056, "learning_rate": 8.867924528301887e-06, "loss": 0.5975, "step": 47 },
    { "epoch": 0.27573001436093825, "grad_norm": 0.6005181670188904, "learning_rate": 9.056603773584907e-06, "loss": 0.5844, "step": 48 },
    { "epoch": 0.28147438966012445, "grad_norm": 0.8314005136489868, "learning_rate": 9.245283018867926e-06, "loss": 0.6181, "step": 49 },
    { "epoch": 0.2872187649593107, "grad_norm": 0.6696397662162781, "learning_rate": 9.433962264150944e-06, "loss": 0.5741, "step": 50 },
    { "epoch": 0.2929631402584969, "grad_norm": 0.6071884632110596, "learning_rate": 9.622641509433963e-06, "loss": 0.5599, "step": 51 },
    { "epoch": 0.2987075155576831, "grad_norm": 0.6364148259162903, "learning_rate": 9.811320754716981e-06, "loss": 0.6052, "step": 52 },
    { "epoch": 0.3044518908568693, "grad_norm": 0.6715644001960754, "learning_rate": 1e-05, "loss": 0.5774, "step": 53 },
    { "epoch": 0.31019626615605556, "grad_norm": 0.5949195623397827, "learning_rate": 9.999887825938495e-06, "loss": 0.5668, "step": 54 },
    { "epoch": 0.31594064145524176, "grad_norm": 0.5966293811798096, "learning_rate": 9.999551308787183e-06, "loss": 0.5708, "step": 55 },
    { "epoch": 0.32168501675442795, "grad_norm": 0.6295287013053894, "learning_rate": 9.998990463645464e-06, "loss": 0.5788, "step": 56 },
    { "epoch": 0.32742939205361415, "grad_norm": 0.623221755027771, "learning_rate": 9.998205315678248e-06, "loss": 0.5868, "step": 57 },
    { "epoch": 0.3331737673528004, "grad_norm": 0.6538676023483276, "learning_rate": 9.997195900114833e-06, "loss": 0.5802, "step": 58 },
    { "epoch": 0.3389181426519866, "grad_norm": 0.6484891772270203, "learning_rate": 9.995962262247314e-06, "loss": 0.5543, "step": 59 },
    { "epoch": 0.3446625179511728, "grad_norm": 0.6138676404953003, "learning_rate": 9.994504457428557e-06, "loss": 0.5633, "step": 60 },
    { "epoch": 0.350406893250359, "grad_norm": 0.5668913722038269, "learning_rate": 9.99282255106972e-06, "loss": 0.5792, "step": 61 },
    { "epoch": 0.35615126854954526, "grad_norm": 0.5608384013175964, "learning_rate": 9.99091661863731e-06, "loss": 0.5884, "step": 62 },
    { "epoch": 0.36189564384873146, "grad_norm": 0.5740435719490051, "learning_rate": 9.988786745649798e-06, "loss": 0.5722, "step": 63 },
    { "epoch": 0.36764001914791766, "grad_norm": 0.5896041393280029, "learning_rate": 9.986433027673786e-06, "loss": 0.5558, "step": 64 },
    { "epoch": 0.37338439444710386, "grad_norm": 0.5800390243530273, "learning_rate": 9.983855570319716e-06, "loss": 0.5773, "step": 65 },
    { "epoch": 0.3791287697462901, "grad_norm": 0.5293853282928467, "learning_rate": 9.981054489237132e-06, "loss": 0.5709, "step": 66 },
    { "epoch": 0.3848731450454763, "grad_norm": 0.5264029502868652, "learning_rate": 9.978029910109491e-06, "loss": 0.5314, "step": 67 },
    { "epoch": 0.3906175203446625, "grad_norm": 0.5725036859512329, "learning_rate": 9.974781968648523e-06, "loss": 0.5544, "step": 68 },
    { "epoch": 0.3963618956438487, "grad_norm": 0.5662187933921814, "learning_rate": 9.971310810588141e-06, "loss": 0.5816, "step": 69 },
    { "epoch": 0.40210627094303497, "grad_norm": 0.4821569323539734, "learning_rate": 9.967616591677906e-06, "loss": 0.5054, "step": 70 },
    { "epoch": 0.40785064624222117, "grad_norm": 0.6668592691421509, "learning_rate": 9.963699477676031e-06, "loss": 0.5594, "step": 71 },
    { "epoch": 0.41359502154140737, "grad_norm": 0.5324398279190063, "learning_rate": 9.959559644341954e-06, "loss": 0.5384, "step": 72 },
    { "epoch": 0.41933939684059357, "grad_norm": 0.5210046172142029, "learning_rate": 9.95519727742844e-06, "loss": 0.5588, "step": 73 },
    { "epoch": 0.4250837721397798, "grad_norm": 0.6099966764450073, "learning_rate": 9.950612572673255e-06, "loss": 0.562, "step": 74 },
    { "epoch": 0.430828147438966, "grad_norm": 0.5562371611595154, "learning_rate": 9.945805735790383e-06, "loss": 0.5726, "step": 75 },
    { "epoch": 0.4365725227381522, "grad_norm": 0.49663493037223816, "learning_rate": 9.940776982460787e-06, "loss": 0.5486, "step": 76 },
    { "epoch": 0.4423168980373384, "grad_norm": 0.5064303278923035, "learning_rate": 9.935526538322744e-06, "loss": 0.5751, "step": 77 },
    { "epoch": 0.4480612733365247, "grad_norm": 0.5378572344779968, "learning_rate": 9.930054638961709e-06, "loss": 0.552, "step": 78 },
    { "epoch": 0.4538056486357109, "grad_norm": 0.5384443402290344, "learning_rate": 9.924361529899754e-06, "loss": 0.5612, "step": 79 },
    { "epoch": 0.4595500239348971, "grad_norm": 0.49997466802597046, "learning_rate": 9.918447466584545e-06, "loss": 0.5432, "step": 80 },
    { "epoch": 0.4652943992340833, "grad_norm": 0.5184059143066406, "learning_rate": 9.91231271437788e-06, "loss": 0.5471, "step": 81 },
    { "epoch": 0.47103877453326953, "grad_norm": 0.5947031378746033, "learning_rate": 9.905957548543794e-06, "loss": 0.5658, "step": 82 },
    { "epoch": 0.47678314983245573, "grad_norm": 0.4928087592124939, "learning_rate": 9.899382254236186e-06, "loss": 0.5503, "step": 83 },
    { "epoch": 0.48252752513164193, "grad_norm": 0.6400409936904907, "learning_rate": 9.892587126486046e-06, "loss": 0.5788, "step": 84 },
    { "epoch": 0.48827190043082813, "grad_norm": 0.5572645664215088, "learning_rate": 9.885572470188207e-06, "loss": 0.5362, "step": 85 },
    { "epoch": 0.4940162757300144, "grad_norm": 0.49279481172561646, "learning_rate": 9.878338600087658e-06, "loss": 0.5539, "step": 86 },
    { "epoch": 0.4997606510292006, "grad_norm": 0.6104176640510559, "learning_rate": 9.87088584076544e-06, "loss": 0.5706, "step": 87 },
    { "epoch": 0.5055050263283868, "grad_norm": 0.49791330099105835, "learning_rate": 9.863214526624065e-06, "loss": 0.5543, "step": 88 },
    { "epoch": 0.511249401627573, "grad_norm": 0.4971017837524414, "learning_rate": 9.85532500187252e-06, "loss": 0.5503, "step": 89 },
    { "epoch": 0.5169937769267592, "grad_norm": 0.5559049844741821, "learning_rate": 9.847217620510815e-06, "loss": 0.56, "step": 90 },
    { "epoch": 0.5227381522259454, "grad_norm": 0.45909181237220764, "learning_rate": 9.83889274631411e-06, "loss": 0.5375, "step": 91 },
    { "epoch": 0.5284825275251317, "grad_norm": 0.6156418323516846, "learning_rate": 9.830350752816386e-06, "loss": 0.5436, "step": 92 },
    { "epoch": 0.5342269028243178, "grad_norm": 0.46865999698638916, "learning_rate": 9.821592023293686e-06, "loss": 0.5317, "step": 93 },
    { "epoch": 0.5399712781235041, "grad_norm": 0.4963522255420685, "learning_rate": 9.81261695074691e-06, "loss": 0.5609, "step": 94 },
    { "epoch": 0.5457156534226902, "grad_norm": 0.5644580721855164, "learning_rate": 9.803425937884202e-06, "loss": 0.5489, "step": 95 },
    { "epoch": 0.5514600287218765, "grad_norm": 0.5057684779167175, "learning_rate": 9.794019397102852e-06, "loss": 0.5628, "step": 96 },
    { "epoch": 0.5572044040210627, "grad_norm": 0.5877383947372437, "learning_rate": 9.784397750470818e-06, "loss": 0.5329, "step": 97 },
    { "epoch": 0.5629487793202489, "grad_norm": 0.5501551628112793, "learning_rate": 9.774561429707769e-06, "loss": 0.5756, "step": 98 },
    { "epoch": 0.5686931546194351, "grad_norm": 0.5017531514167786, "learning_rate": 9.764510876165727e-06, "loss": 0.5309, "step": 99 },
    { "epoch": 0.5744375299186214, "grad_norm": 0.5698550343513489, "learning_rate": 9.754246540809257e-06, "loss": 0.5344, "step": 100 },
    { "epoch": 0.5801819052178075, "grad_norm": 0.6889923810958862, "learning_rate": 9.743768884195233e-06, "loss": 0.566, "step": 101 },
    { "epoch": 0.5859262805169938, "grad_norm": 0.5755162835121155, "learning_rate": 9.733078376452172e-06, "loss": 0.5623, "step": 102 },
    { "epoch": 0.59167065581618, "grad_norm": 0.5690771341323853, "learning_rate": 9.722175497259145e-06, "loss": 0.5472, "step": 103 },
    { "epoch": 0.5974150311153662, "grad_norm": 0.48988884687423706, "learning_rate": 9.71106073582425e-06, "loss": 0.5306, "step": 104 },
    { "epoch": 0.6031594064145525, "grad_norm": 0.5045924782752991, "learning_rate": 9.699734590862655e-06, "loss": 0.5319, "step": 105 },
    { "epoch": 0.6089037817137386, "grad_norm": 0.6457961201667786, "learning_rate": 9.688197570574238e-06, "loss": 0.5546, "step": 106 },
    { "epoch": 0.6146481570129249, "grad_norm": 0.5357460379600525, "learning_rate": 9.676450192620767e-06, "loss": 0.5485, "step": 107 },
    { "epoch": 0.6203925323121111, "grad_norm": 0.6195516586303711, "learning_rate": 9.66449298410268e-06, "loss": 0.5651, "step": 108 },
    { "epoch": 0.6261369076112973, "grad_norm": 0.532525360584259, "learning_rate": 9.652326481535434e-06, "loss": 0.5119, "step": 109 },
    { "epoch": 0.6318812829104835, "grad_norm": 0.6295329332351685, "learning_rate": 9.639951230825433e-06, "loss": 0.5788, "step": 110 },
    { "epoch": 0.6376256582096697, "grad_norm": 0.6266581416130066, "learning_rate": 9.62736778724553e-06, "loss": 0.5582, "step": 111 },
    { "epoch": 0.6433700335088559, "grad_norm": 0.6455391645431519, "learning_rate": 9.614576715410116e-06, "loss": 0.5676, "step": 112 },
    { "epoch": 0.6491144088080422, "grad_norm": 0.6532648801803589, "learning_rate": 9.60157858924978e-06, "loss": 0.5322, "step": 113 },
    { "epoch": 0.6548587841072283, "grad_norm": 0.695582389831543, "learning_rate": 9.588373991985566e-06, "loss": 0.557, "step": 114 },
    { "epoch": 0.6606031594064146, "grad_norm": 0.6813830733299255, "learning_rate": 9.574963516102795e-06, "loss": 0.5639, "step": 115 },
    { "epoch": 0.6663475347056008, "grad_norm": 0.537449836730957, "learning_rate": 9.561347763324484e-06, "loss": 0.528, "step": 116 },
    { "epoch": 0.672091910004787, "grad_norm": 0.5756135582923889, "learning_rate": 9.547527344584353e-06, "loss": 0.5461, "step": 117 },
    { "epoch": 0.6778362853039732, "grad_norm": 0.664249837398529, "learning_rate": 9.533502879999398e-06, "loss": 0.5685, "step": 118 },
    { "epoch": 0.6835806606031594, "grad_norm": 0.5206965804100037, "learning_rate": 9.519274998842084e-06, "loss": 0.5717, "step": 119 },
    { "epoch": 0.6893250359023456, "grad_norm": 0.6174411773681641, "learning_rate": 9.504844339512096e-06, "loss": 0.5338, "step": 120 },
    { "epoch": 0.6950694112015319, "grad_norm": 0.4804738461971283, "learning_rate": 9.490211549507701e-06, "loss": 0.5557, "step": 121 },
    { "epoch": 0.700813786500718, "grad_norm": 0.49055036902427673, "learning_rate": 9.475377285396692e-06, "loss": 0.5324, "step": 122 },
    { "epoch": 0.7065581617999043, "grad_norm": 0.486482709646225, "learning_rate": 9.460342212786933e-06, "loss": 0.5242, "step": 123 },
    { "epoch": 0.7123025370990905, "grad_norm": 0.549220085144043, "learning_rate": 9.445107006296488e-06, "loss": 0.5692, "step": 124 },
    { "epoch": 0.7180469123982767, "grad_norm": 0.5209650993347168, "learning_rate": 9.42967234952335e-06, "loss": 0.5685, "step": 125 },
    { "epoch": 0.7237912876974629, "grad_norm": 0.5501351952552795, "learning_rate": 9.414038935014777e-06, "loss": 0.5288, "step": 126 },
    { "epoch": 0.7295356629966491, "grad_norm": 0.5890218615531921, "learning_rate": 9.398207464236209e-06, "loss": 0.5288, "step": 127 },
    { "epoch": 0.7352800382958353, "grad_norm": 0.5494324564933777, "learning_rate": 9.382178647539794e-06, "loss": 0.541, "step": 128 },
    { "epoch": 0.7410244135950216, "grad_norm": 0.5340805053710938, "learning_rate": 9.365953204132526e-06, "loss": 0.5494, "step": 129 },
    { "epoch": 0.7467687888942077, "grad_norm": 0.4657943844795227, "learning_rate": 9.349531862043952e-06, "loss": 0.5629, "step": 130 },
    { "epoch": 0.752513164193394, "grad_norm": 0.4700133502483368, "learning_rate": 9.332915358093532e-06, "loss": 0.5265, "step": 131 },
    { "epoch": 0.7582575394925802, "grad_norm": 0.5137308239936829, "learning_rate": 9.316104437857561e-06, "loss": 0.5315, "step": 132 },
    { "epoch": 0.7640019147917664, "grad_norm": 0.5512875914573669, "learning_rate": 9.299099855635716e-06, "loss": 0.5246, "step": 133 },
    { "epoch": 0.7697462900909526, "grad_norm": 0.5698496699333191, "learning_rate": 9.28190237441722e-06, "loss": 0.5161, "step": 134 },
    { "epoch": 0.7754906653901388, "grad_norm": 0.6161168217658997, "learning_rate": 9.2645127658466e-06, "loss": 0.5358, "step": 135 },
    { "epoch": 0.781235040689325, "grad_norm": 0.5661118626594543, "learning_rate": 9.246931810189061e-06, "loss": 0.546, "step": 136 },
    { "epoch": 0.7869794159885113, "grad_norm": 0.5279558897018433, "learning_rate": 9.229160296295488e-06, "loss": 0.5096, "step": 137 },
    { "epoch": 0.7927237912876974, "grad_norm": 0.5370686650276184, "learning_rate": 9.211199021567034e-06, "loss": 0.5523, "step": 138 },
    { "epoch": 0.7984681665868837, "grad_norm": 0.6411705613136292, "learning_rate": 9.193048791919357e-06, "loss": 0.5595, "step": 139 },
    { "epoch": 0.8042125418860699, "grad_norm": 0.5257410407066345, "learning_rate": 9.174710421746445e-06, "loss": 0.5686, "step": 140 },
    { "epoch": 0.8099569171852561, "grad_norm": 0.5763169527053833, "learning_rate": 9.156184733884084e-06, "loss": 0.5252, "step": 141 },
    { "epoch": 0.8157012924844423, "grad_norm": 0.5250378847122192, "learning_rate": 9.137472559572935e-06, "loss": 0.561, "step": 142 },
    { "epoch": 0.8214456677836285, "grad_norm": 0.6087478399276733, "learning_rate": 9.118574738421236e-06, "loss": 0.5177, "step": 143 },
    { "epoch": 0.8271900430828147, "grad_norm": 0.6588859558105469, "learning_rate": 9.099492118367123e-06, "loss": 0.5578, "step": 144 },
    { "epoch": 0.832934418382001, "grad_norm": 0.48864591121673584, "learning_rate": 9.080225555640601e-06, "loss": 0.5394, "step": 145 },
    { "epoch": 0.8386787936811871, "grad_norm": 0.5315064191818237, "learning_rate": 9.0607759147251e-06, "loss": 0.5404, "step": 146 },
    { "epoch": 0.8444231689803734, "grad_norm": 0.5749916434288025, "learning_rate": 9.04114406831871e-06, "loss": 0.5114, "step": 147 },
    { "epoch": 0.8501675442795597, "grad_norm": 0.47096845507621765, "learning_rate": 9.021330897295011e-06, "loss": 0.5383, "step": 148 },
    { "epoch": 0.8559119195787458, "grad_norm": 0.5951332449913025, "learning_rate": 9.001337290663548e-06, "loss": 0.5173, "step": 149 },
    { "epoch": 0.861656294877932, "grad_norm": 0.5525330305099487, "learning_rate": 8.981164145529943e-06, "loss": 0.5428, "step": 150 },
    { "epoch": 0.8674006701771182, "grad_norm": 0.47650671005249023, "learning_rate": 8.960812367055646e-06, "loss": 0.5582, "step": 151 },
    { "epoch": 0.8731450454763044, "grad_norm": 0.542094349861145, "learning_rate": 8.940282868417321e-06, "loss": 0.4893, "step": 152 },
    { "epoch": 0.8788894207754907, "grad_norm": 0.5436213612556458, "learning_rate": 8.91957657076586e-06, "loss": 0.5531, "step": 153 },
    { "epoch": 0.8846337960746768, "grad_norm": 0.529384434223175, "learning_rate": 8.898694403185066e-06, "loss": 0.5151, "step": 154 },
    { "epoch": 0.8903781713738631, "grad_norm": 0.5005679726600647, "learning_rate": 8.877637302649962e-06, "loss": 0.5259, "step": 155 },
    { "epoch": 0.8961225466730494, "grad_norm": 0.5237105488777161, "learning_rate": 8.856406213984743e-06, "loss": 0.5442, "step": 156 },
    { "epoch": 0.9018669219722355, "grad_norm": 0.45996320247650146, "learning_rate": 8.835002089820387e-06, "loss": 0.5279, "step": 157 },
    { "epoch": 0.9076112972714218, "grad_norm": 0.5661066770553589, "learning_rate": 8.81342589055191e-06, "loss": 0.5578, "step": 158 },
    { "epoch": 0.9133556725706079, "grad_norm": 0.46714460849761963, "learning_rate": 8.791678584295276e-06, "loss": 0.5223, "step": 159 },
    { "epoch": 0.9191000478697942, "grad_norm": 0.5787307024002075, "learning_rate": 8.76976114684395e-06, "loss": 0.5285, "step": 160 },
    { "epoch": 0.9248444231689804, "grad_norm": 0.5483257174491882, "learning_rate": 8.747674561625121e-06, "loss": 0.5488, "step": 161 },
    { "epoch": 0.9305887984681666, "grad_norm": 0.4516684412956238, "learning_rate": 8.725419819655582e-06, "loss": 0.4819, "step": 162 },
    { "epoch": 0.9363331737673528, "grad_norm": 0.5912066698074341, "learning_rate": 8.702997919497247e-06, "loss": 0.5398, "step": 163 },
    { "epoch": 0.9420775490665391, "grad_norm": 0.544422447681427, "learning_rate": 8.680409867212359e-06, "loss": 0.5563, "step": 164 },
    { "epoch": 0.9478219243657252, "grad_norm": 0.615469217300415, "learning_rate": 8.657656676318346e-06, "loss": 0.5527, "step": 165 },
    { "epoch": 0.9535662996649115, "grad_norm": 0.48167160153388977, "learning_rate": 8.634739367742341e-06, "loss": 0.5031, "step": 166 },
    { "epoch": 0.9593106749640976, "grad_norm": 0.659505307674408, "learning_rate": 8.611658969775378e-06, "loss": 0.5239, "step": 167 },
    { "epoch": 0.9650550502632839, "grad_norm": 0.5576907396316528, "learning_rate": 8.588416518026248e-06, "loss": 0.5173, "step": 168 },
    { "epoch": 0.9707994255624701, "grad_norm": 0.5543246269226074, "learning_rate": 8.565013055375035e-06, "loss": 0.5346, "step": 169 },
    { "epoch": 0.9765438008616563, "grad_norm": 0.6100912690162659, "learning_rate": 8.541449631926325e-06, "loss": 0.5268, "step": 170 },
    { "epoch": 0.9822881761608425, "grad_norm": 0.47118762135505676, "learning_rate": 8.51772730496208e-06, "loss": 0.5605, "step": 171 },
    { "epoch": 0.9880325514600288, "grad_norm": 0.6019647121429443, "learning_rate": 8.49384713889421e-06, "loss": 0.5273, "step": 172 },
    { "epoch": 0.9937769267592149, "grad_norm": 0.6136192083358765, "learning_rate": 8.469810205216795e-06, "loss": 0.5659, "step": 173 },
    { "epoch": 0.9995213020584012, "grad_norm": 0.5436699986457825, "learning_rate": 8.445617582458033e-06, "loss": 0.5469, "step": 174 },
    { "epoch": 1.0052656773575874, "grad_norm": 1.5260647535324097, "learning_rate": 8.42127035613182e-06, "loss": 0.9094, "step": 175 },
    { "epoch": 1.0110100526567736, "grad_norm": 0.5587025880813599, "learning_rate": 8.396769618689064e-06, "loss": 0.4939, "step": 176 },
    { "epoch": 1.0167544279559597, "grad_norm": 0.5418161153793335, "learning_rate": 8.372116469468654e-06, "loss": 0.5211, "step": 177 },
    { "epoch": 1.022498803255146, "grad_norm": 0.6211748719215393, "learning_rate": 8.347312014648144e-06, "loss": 0.4619, "step": 178 },
    { "epoch": 1.0282431785543322, "grad_norm": 0.5461438298225403, "learning_rate": 8.32235736719411e-06, "loss": 0.4424, "step": 179 },
    { "epoch": 1.0339875538535184, "grad_norm": 0.7238538861274719, "learning_rate": 8.297253646812213e-06, "loss": 0.5283, "step": 180 },
    { "epoch": 1.0397319291527047, "grad_norm": 0.5685709714889526, "learning_rate": 8.272001979896962e-06, "loss": 0.4777, "step": 181 },
    { "epoch": 1.0454763044518909, "grad_norm": 0.6029736399650574, "learning_rate": 8.246603499481177e-06, "loss": 0.4754, "step": 182 },
    { "epoch": 1.051220679751077, "grad_norm": 0.7201405167579651, "learning_rate": 8.221059345185136e-06, "loss": 0.4864, "step": 183 },
    { "epoch": 1.0569650550502634, "grad_norm": 0.5729609131813049, "learning_rate": 8.195370663165455e-06, "loss": 0.4793, "step": 184 },
    { "epoch": 1.0627094303494495, "grad_norm": 0.6719270944595337, "learning_rate": 8.169538606063647e-06, "loss": 0.4964, "step": 185 },
    { "epoch": 1.0684538056486357, "grad_norm": 0.6572569608688354, "learning_rate": 8.143564332954426e-06, "loss": 0.5049, "step": 186 },
    { "epoch": 1.0741981809478218, "grad_norm": 0.5375853180885315, "learning_rate": 8.117449009293668e-06, "loss": 0.5056, "step": 187 },
    { "epoch": 1.0799425562470082, "grad_norm": 0.5356974005699158, "learning_rate": 8.091193806866147e-06, "loss": 0.5014, "step": 188 },
    { "epoch": 1.0856869315461943, "grad_norm": 0.5870072245597839, "learning_rate": 8.064799903732936e-06, "loss": 0.4648, "step": 189 },
    { "epoch": 1.0914313068453805, "grad_norm": 0.4977969825267792, "learning_rate": 8.038268484178566e-06, "loss": 0.4912, "step": 190 },
    { "epoch": 1.0971756821445668, "grad_norm": 0.49062633514404297, "learning_rate": 8.011600738657865e-06, "loss": 0.493, "step": 191 },
    { "epoch": 1.102920057443753, "grad_norm": 0.5364016890525818, "learning_rate": 7.98479786374257e-06, "loss": 0.4551, "step": 192 },
    { "epoch": 1.1086644327429391, "grad_norm": 0.6891268491744995, "learning_rate": 7.957861062067614e-06, "loss": 0.4907, "step": 193 },
    { "epoch": 1.1144088080421255, "grad_norm": 0.5725308060646057, "learning_rate": 7.930791542277175e-06, "loss": 0.4892, "step": 194 },
    { "epoch": 1.1201531833413116, "grad_norm": 0.5667552947998047, "learning_rate": 7.903590518970445e-06, "loss": 0.5127, "step": 195 },
    { "epoch": 1.1258975586404978, "grad_norm": 0.462298721075058, "learning_rate": 7.876259212647129e-06, "loss": 0.441, "step": 196 },
    { "epoch": 1.1316419339396842, "grad_norm": 0.5504534244537354, "learning_rate": 7.848798849652684e-06, "loss": 0.5009, "step": 197 },
    { "epoch": 1.1373863092388703, "grad_norm": 0.48440906405448914, "learning_rate": 7.821210662123284e-06, "loss": 0.4731, "step": 198 },
    { "epoch": 1.1431306845380564, "grad_norm": 0.4688943922519684, "learning_rate": 7.793495887930551e-06, "loss": 0.4456, "step": 199 },
    { "epoch": 1.1488750598372426, "grad_norm": 0.5395812392234802, "learning_rate": 7.765655770625997e-06, "loss": 0.466, "step": 200 },
    { "epoch": 1.154619435136429, "grad_norm": 0.6602702140808105, "learning_rate": 7.737691559385237e-06, "loss": 0.4981, "step": 201 },
    { "epoch": 1.160363810435615, "grad_norm": 0.48276349902153015, "learning_rate": 7.709604508951927e-06, "loss": 0.5092, "step": 202 },
    { "epoch": 1.1661081857348012, "grad_norm": 0.5397800803184509, "learning_rate": 7.68139587958148e-06, "loss": 0.4675, "step": 203 },
    { "epoch": 1.1718525610339876, "grad_norm": 0.5717769861221313, "learning_rate": 7.653066936984504e-06, "loss": 0.5035, "step": 204 },
    { "epoch": 1.1775969363331737, "grad_norm": 0.5147746801376343, "learning_rate": 7.6246189522700205e-06, "loss": 0.5191, "step": 205 },
    { "epoch": 1.18334131163236, "grad_norm": 0.4830034077167511, "learning_rate": 7.596053201888425e-06, "loss": 0.4659, "step": 206 },
    { "epoch": 1.1890856869315463, "grad_norm": 0.5149533748626709, "learning_rate": 7.56737096757421e-06, "loss": 0.4666, "step": 207 },
    { "epoch": 1.1948300622307324, "grad_norm": 0.47677546739578247, "learning_rate": 7.538573536288466e-06, "loss": 0.4842, "step": 208 },
    { "epoch": 1.2005744375299185, "grad_norm": 0.42174142599105835, "learning_rate": 7.509662200161122e-06, "loss": 0.4519, "step": 209 },
    { "epoch": 1.206318812829105, "grad_norm": 0.5489972829818726, "learning_rate": 7.480638256432977e-06, "loss": 0.5119, "step": 210 },
    { "epoch": 1.212063188128291, "grad_norm": 0.4667467474937439, "learning_rate": 7.4515030073974915e-06, "loss": 0.4918, "step": 211 },
    { "epoch": 1.2178075634274772, "grad_norm": 0.5094060897827148, "learning_rate": 7.422257760342351e-06, "loss": 0.4915, "step": 212 },
    { "epoch": 1.2235519387266636, "grad_norm": 0.5071128606796265, "learning_rate": 7.392903827490814e-06, "loss": 0.4598, "step": 213 },
    { "epoch": 1.2292963140258497, "grad_norm": 0.49673011898994446, "learning_rate": 7.363442525942827e-06, "loss": 0.4467, "step": 214 },
    { "epoch": 1.2350406893250359, "grad_norm": 0.46016961336135864, "learning_rate": 7.333875177615931e-06, "loss": 0.4927, "step": 215 },
    { "epoch": 1.2407850646242222, "grad_norm": 0.5728297829627991, "learning_rate": 7.304203109185947e-06, "loss": 0.5002, "step": 216 },
    { "epoch": 1.2465294399234084, "grad_norm": 0.43362411856651306, "learning_rate": 7.274427652027444e-06, "loss": 0.4451, "step": 217 },
    { "epoch": 1.2522738152225945, "grad_norm": 0.496272474527359, "learning_rate": 7.244550142154009e-06, "loss": 0.4794, "step": 218 },
    { "epoch": 1.2580181905217809, "grad_norm": 0.5590081810951233, "learning_rate": 7.214571920158293e-06, "loss": 0.4845, "step": 219 },
    { "epoch": 1.263762565820967, "grad_norm": 0.5159772038459778, "learning_rate": 7.1844943311518665e-06, "loss": 0.5251, "step": 220 },
    { "epoch": 1.2695069411201532, "grad_norm": 0.4696570932865143, "learning_rate": 7.1543187247048525e-06, "loss": 0.5033, "step": 221 },
    { "epoch": 1.2752513164193395, "grad_norm": 0.5043511390686035, "learning_rate": 7.124046454785387e-06, "loss": 0.4739, "step": 222 },
    { "epoch": 1.2809956917185257, "grad_norm": 0.5755232572555542, "learning_rate": 7.093678879698858e-06, "loss": 0.5209, "step": 223 },
    { "epoch": 1.2867400670177118, "grad_norm": 0.46288853883743286, "learning_rate": 7.063217362026957e-06, "loss": 0.4692, "step": 224 },
    { "epoch": 1.292484442316898, "grad_norm": 0.4486297369003296, "learning_rate": 7.032663268566547e-06, "loss": 0.4581, "step": 225 },
    { "epoch": 1.2982288176160843, "grad_norm": 0.5111509561538696, "learning_rate": 7.002017970268336e-06, "loss": 0.4648, "step": 226 },
    { "epoch": 1.3039731929152705, "grad_norm": 0.5335001349449158, "learning_rate": 6.97128284217535e-06, "loss": 0.508, "step": 227 },
    { "epoch": 1.3097175682144566, "grad_norm": 0.5523168444633484, "learning_rate": 6.9404592633612486e-06, "loss": 0.546, "step": 228 },
    { "epoch": 1.3154619435136428, "grad_norm": 0.4908071458339691, "learning_rate": 6.909548616868444e-06, "loss": 0.4959, "step": 229 },
    { "epoch": 1.3212063188128291, "grad_norm": 0.4557272791862488, "learning_rate": 6.878552289646041e-06, "loss": 0.4815, "step": 230 },
    { "epoch": 1.3269506941120153, "grad_norm": 0.4479963779449463, "learning_rate": 6.847471672487607e-06, "loss": 0.4357, "step": 231 },
    { "epoch": 1.3326950694112014, "grad_norm": 0.4967019855976105, "learning_rate": 6.816308159968761e-06, "loss": 0.539, "step": 232 },
    { "epoch": 1.3384394447103878, "grad_norm": 0.4799559414386749, "learning_rate": 6.7850631503846165e-06, "loss": 0.4802, "step": 233 },
    { "epoch": 1.344183820009574, "grad_norm": 0.485273540019989, "learning_rate": 6.753738045687021e-06, "loss": 0.4945, "step": 234 },
    { "epoch": 1.34992819530876, "grad_norm": 0.4190736711025238, "learning_rate": 6.722334251421665e-06, "loss": 0.4703, "step": 235 },
    { "epoch": 1.3556725706079464, "grad_norm": 0.4951339364051819, "learning_rate": 6.690853176665007e-06, "loss": 0.5339, "step": 236 },
    { "epoch": 1.3614169459071326, "grad_norm": 0.4466400742530823, "learning_rate": 6.659296233961055e-06, "loss": 0.4521, "step": 237 },
    { "epoch": 1.3671613212063187, "grad_norm": 0.4460606575012207, "learning_rate": 6.627664839257979e-06, "loss": 0.4827, "step": 238 },
    { "epoch": 1.372905696505505, "grad_norm": 0.49249327182769775, "learning_rate": 6.595960411844589e-06, "loss": 0.5051, "step": 239 },
    { "epoch": 1.3786500718046912, "grad_norm": 0.491413414478302, "learning_rate": 6.564184374286636e-06, "loss": 0.5031, "step": 240 },
    { "epoch": 1.3843944471038774, "grad_norm": 0.42033085227012634, "learning_rate": 6.532338152363001e-06, "loss": 0.4646, "step": 241 },
    { "epoch": 1.3901388224030637, "grad_norm": 0.4750801920890808, "learning_rate": 6.500423175001705e-06, "loss": 0.4337, "step": 242 },
    { "epoch": 1.39588319770225, "grad_norm": 0.5122845768928528, "learning_rate": 6.468440874215801e-06, "loss": 0.5052, "step": 243 },
    { "epoch": 1.401627573001436, "grad_norm": 0.4859732985496521, "learning_rate": 6.43639268503912e-06, "loss": 0.499, "step": 244 },
    { "epoch": 1.4073719483006224, "grad_norm": 0.512253999710083, "learning_rate": 6.40428004546188e-06, "loss": 0.5064, "step": 245 },
    { "epoch": 1.4131163235998085, "grad_norm": 0.46881169080734253, "learning_rate": 6.372104396366162e-06, "loss": 0.4765, "step": 246 },
    { "epoch": 1.4188606988989947, "grad_norm": 0.4910268187522888, "learning_rate": 6.339867181461265e-06, "loss": 0.5059, "step": 247 },
    { "epoch": 1.424605074198181, "grad_norm": 0.43128618597984314, "learning_rate": 6.307569847218917e-06, "loss": 0.4667, "step": 248 },
    { "epoch": 1.4303494494973672, "grad_norm": 0.4424154460430145, "learning_rate": 6.275213842808383e-06, "loss": 0.4834, "step": 249 },
    { "epoch": 1.4360938247965533, "grad_norm": 0.49621695280075073, "learning_rate": 6.242800620031434e-06, "loss": 0.4701, "step": 250 },
    { "epoch": 1.4418382000957397, "grad_norm": 0.4383918344974518, "learning_rate": 6.2103316332572095e-06, "loss": 0.5017, "step": 251 },
    { "epoch": 1.4475825753949259, "grad_norm": 0.4955291748046875, "learning_rate": 6.177808339356954e-06, "loss": 0.5253, "step": 252 },
    { "epoch": 1.453326950694112, "grad_norm": 0.43212026357650757, "learning_rate": 6.14523219763866e-06, "loss": 0.455, "step": 253 },
    { "epoch": 1.4590713259932984, "grad_norm": 0.44476839900016785, "learning_rate": 6.112604669781572e-06, "loss": 0.4844, "step": 254 },
    { "epoch": 1.4648157012924845, "grad_norm": 0.46556708216667175, "learning_rate": 6.079927219770623e-06, "loss": 0.4957, "step": 255 },
    { "epoch": 1.4705600765916707, "grad_norm": 0.4209665060043335, "learning_rate": 6.047201313830724e-06, "loss": 0.4859, "step": 256 },
    { "epoch": 1.4763044518908568, "grad_norm": 0.4632583558559418, "learning_rate": 6.014428420360987e-06, "loss": 0.5136, "step": 257 },
    { "epoch": 1.4820488271900432, "grad_norm": 0.4351138472557068, "learning_rate": 5.9816100098688456e-06, "loss": 0.4618, "step": 258 },
    { "epoch": 1.4877932024892293, "grad_norm": 0.4440339207649231, "learning_rate": 5.948747554904054e-06, "loss": 0.5211, "step": 259 },
    { "epoch": 1.4935375777884154, "grad_norm": 0.49033647775650024, "learning_rate": 5.915842529992632e-06, "loss": 0.4836, "step": 260 },
    { "epoch": 1.4992819530876016, "grad_norm": 0.45823630690574646, "learning_rate": 5.8828964115706925e-06, "loss": 0.4288, "step": 261 },
    { "epoch": 1.505026328386788, "grad_norm": 0.4555763304233551, "learning_rate": 5.849910677918205e-06, "loss": 0.4548, "step": 262 },
    { "epoch": 1.510770703685974, "grad_norm": 0.46555453538894653, "learning_rate": 5.816886809092651e-06, "loss": 0.4688, "step": 263 },
    { "epoch": 1.5165150789851602, "grad_norm": 0.5020288825035095, "learning_rate": 5.783826286862631e-06, "loss": 0.5108, "step": 264 },
    { "epoch": 1.5222594542843466, "grad_norm": 0.410932719707489, "learning_rate": 5.750730594641367e-06, "loss": 0.4571, "step": 265 },
    { "epoch": 1.5280038295835328, "grad_norm": 0.46809419989585876, "learning_rate": 5.717601217420143e-06, "loss": 0.4691, "step": 266 },
    { "epoch": 1.533748204882719, "grad_norm": 0.44337236881256104, "learning_rate": 5.68443964170168e-06, "loss": 0.477, "step": 267 },
    { "epoch": 1.5394925801819053, "grad_norm": 0.4952318072319031, "learning_rate": 5.6512473554334294e-06, "loss": 0.49, "step": 268 },
    { "epoch": 1.5452369554810914, "grad_norm": 0.46753644943237305, "learning_rate": 5.618025847940817e-06, "loss": 0.4848, "step": 269 },
    { "epoch": 1.5509813307802776, "grad_norm": 0.45212316513061523, "learning_rate": 5.584776609860414e-06, "loss": 0.4696, "step": 270 },
    { "epoch": 1.556725706079464, "grad_norm": 0.5053378939628601, "learning_rate": 5.551501133073048e-06, "loss": 0.5447, "step": 271 },
    { "epoch": 1.56247008137865, "grad_norm": 0.4040236473083496, "learning_rate": 5.518200910636875e-06, "loss": 0.4678, "step": 272 },
    { "epoch": 1.5682144566778362, "grad_norm": 0.4419972002506256, "learning_rate": 5.4848774367203715e-06, "loss": 0.4994, "step": 273 },
    { "epoch": 1.5739588319770226, "grad_norm": 0.43512895703315735, "learning_rate": 5.451532206535306e-06, "loss": 0.5056, "step": 274 },
    { "epoch": 1.5797032072762087, "grad_norm": 0.492079496383667, "learning_rate": 5.418166716269636e-06, "loss": 0.4957, "step": 275 },
    { "epoch": 1.5854475825753949, "grad_norm": 0.4276512861251831, "learning_rate": 5.384782463020385e-06, "loss": 0.4901, "step": 276 },
    { "epoch": 1.5911919578745812, "grad_norm": 0.39996013045310974, "learning_rate": 5.351380944726465e-06, "loss": 0.4862, "step": 277 },
    { "epoch": 1.5969363331737674, "grad_norm": 0.3694234788417816, "learning_rate": 5.317963660101464e-06, "loss": 0.4218, "step": 278 },
    { "epoch": 1.6026807084729535, "grad_norm": 0.4611278772354126, "learning_rate": 5.284532108566396e-06, "loss": 0.4908, "step": 279 },
    { "epoch": 1.6084250837721399, "grad_norm": 0.435573548078537, "learning_rate": 5.251087790182428e-06, "loss": 0.4905, "step": 280 },
    { "epoch": 1.614169459071326, "grad_norm": 0.4718412458896637, "learning_rate": 5.217632205583574e-06, "loss": 0.5055, "step": 281 },
    { "epoch": 1.6199138343705122, "grad_norm": 0.41079840064048767, "learning_rate": 5.184166855909355e-06, "loss": 0.4872, "step": 282 },
    { "epoch": 1.6256582096696985, "grad_norm": 0.42057737708091736, "learning_rate": 5.150693242737444e-06, "loss": 0.5028, "step": 283 },
    { "epoch": 1.6314025849688847, "grad_norm": 0.43592604994773865, "learning_rate": 5.117212868016303e-06, "loss": 0.5228, "step": 284 },
    { "epoch": 1.6371469602680708, "grad_norm": 0.4700012803077698, "learning_rate": 5.083727233997775e-06, "loss": 0.4603, "step": 285 },
    { "epoch": 1.6428913355672572, "grad_norm": 0.4413910210132599, "learning_rate": 5.05023784316969e-06, "loss": 0.4787, "step": 286 },
    { "epoch": 1.6486357108664431, "grad_norm": 0.4498383700847626, "learning_rate": 5.016746198188439e-06, "loss": 0.5171, "step": 287 },
    { "epoch": 1.6543800861656295, "grad_norm": 0.3725832998752594, "learning_rate": 4.983253801811562e-06, "loss": 0.447, "step": 288 },
    { "epoch": 1.6601244614648158, "grad_norm": 0.41565757989883423, "learning_rate": 4.949762156830312e-06, "loss": 0.5029, "step": 289 },
    { "epoch": 1.6658688367640018, "grad_norm": 0.4530212879180908, "learning_rate": 4.916272766002227e-06, "loss": 0.487, "step": 290 },
    { "epoch": 1.6716132120631881, "grad_norm": 0.44627729058265686, "learning_rate": 4.882787131983698e-06, "loss": 0.4529, "step": 291 },
    { "epoch": 1.6773575873623745, "grad_norm": 0.4549354612827301, "learning_rate": 4.849306757262558e-06, "loss": 0.4706, "step": 292 },
    { "epoch": 1.6831019626615604, "grad_norm": 0.48366779088974, "learning_rate": 4.8158331440906466e-06, "loss": 0.5149, "step": 293 },
    { "epoch": 1.6888463379607468, "grad_norm": 0.46364232897758484, "learning_rate": 4.7823677944164285e-06, "loss": 0.5396, "step": 294 },
    { "epoch": 1.694590713259933, "grad_norm": 0.43879616260528564, "learning_rate": 4.748912209817572e-06, "loss": 0.4773, "step": 295 },
    { "epoch": 1.700335088559119, "grad_norm": 0.3839658200740814, "learning_rate": 4.715467891433607e-06, "loss": 0.4477, "step": 296 },
    { "epoch": 1.7060794638583054, "grad_norm": 0.4020622968673706, "learning_rate": 4.682036339898537e-06, "loss": 0.4777, "step": 297 },
    { "epoch": 1.7118238391574916, "grad_norm": 0.42523112893104553, "learning_rate": 4.6486190552735375e-06, "loss": 0.5096, "step": 298 },
    { "epoch": 1.7175682144566777, "grad_norm": 0.43856796622276306, "learning_rate": 4.615217536979616e-06, "loss": 0.4802, "step": 299 },
    { "epoch": 1.723312589755864, "grad_norm": 0.3956157863140106, "learning_rate": 4.581833283730367e-06, "loss": 0.4479, "step": 300 },
    { "epoch": 1.7290569650550502, "grad_norm": 0.4652330279350281, "learning_rate": 4.548467793464696e-06, "loss": 0.5231, "step": 301 },
    { "epoch": 1.7348013403542364, "grad_norm": 0.3997798562049866, "learning_rate": 4.515122563279631e-06, "loss": 0.4558, "step": 302 },
    { "epoch": 1.7405457156534228, "grad_norm": 0.4045880138874054, "learning_rate": 4.481799089363127e-06, "loss": 0.4589, "step": 303 },
    { "epoch": 1.746290090952609, "grad_norm": 0.4493541717529297, "learning_rate": 4.448498866926952e-06, "loss": 0.4963, "step": 304 },
    { "epoch": 1.752034466251795, "grad_norm": 0.3999355435371399, "learning_rate": 4.415223390139588e-06, "loss": 0.5129, "step": 305 },
    { "epoch": 1.7577788415509814, "grad_norm": 0.40998125076293945, "learning_rate": 4.381974152059184e-06, "loss": 0.49, "step": 306 },
    { "epoch": 1.7635232168501676, "grad_norm": 0.3941861391067505, "learning_rate": 4.348752644566573e-06, "loss": 0.4556, "step": 307 },
    { "epoch": 1.7692675921493537, "grad_norm": 0.4115658402442932, "learning_rate": 4.315560358298321e-06, "loss": 0.4988, "step": 308 },
    { "epoch": 1.77501196744854, "grad_norm": 0.4041730463504791, "learning_rate": 4.2823987825798575e-06, "loss": 0.4673, "step": 309 },
    { "epoch": 1.7807563427477262, "grad_norm": 0.3939685821533203, "learning_rate": 4.249269405358634e-06, "loss": 0.4542, "step": 310 },
    { "epoch": 1.7865007180469124, "grad_norm": 0.4312926232814789, "learning_rate": 4.2161737131373695e-06, "loss": 0.5143, "step": 311 },
    { "epoch": 1.7922450933460987, "grad_norm": 0.4078906178474426, "learning_rate": 4.183113190907349e-06, "loss": 0.475, "step": 312 },
    { "epoch": 1.7979894686452849, "grad_norm": 0.42950204014778137, "learning_rate": 4.150089322081797e-06, "loss": 0.4607, "step": 313 },
    { "epoch": 1.803733843944471, "grad_norm": 0.37166374921798706, "learning_rate": 4.1171035884293075e-06, "loss": 0.4488, "step": 314 },
    { "epoch": 1.8094782192436574, "grad_norm": 0.38504746556282043, "learning_rate": 4.084157470007371e-06, "loss": 0.4823, "step": 315 },
    { "epoch": 1.8152225945428435, "grad_norm": 0.45277538895606995, "learning_rate": 4.051252445095946e-06, "loss": 0.5264, "step": 316 },
    { "epoch": 1.8209669698420297, "grad_norm": 0.3826143443584442, "learning_rate": 4.018389990131156e-06, "loss": 0.4834, "step": 317 },
    { "epoch": 1.826711345141216, "grad_norm": 0.36567309498786926, "learning_rate": 3.985571579639013e-06, "loss": 0.4675, "step": 318 },
    { "epoch": 1.832455720440402, "grad_norm": 0.38398852944374084, "learning_rate": 3.952798686169279e-06, "loss": 0.468, "step": 319 },
    { "epoch": 1.8382000957395883, "grad_norm": 0.46181172132492065, "learning_rate": 3.920072780229378e-06, "loss": 0.4756, "step": 320 },
    { "epoch": 1.8439444710387747, "grad_norm": 0.4305135905742645, "learning_rate": 3.887395330218429e-06, "loss": 0.5031, "step": 321 },
    { "epoch": 1.8496888463379606, "grad_norm": 0.36271166801452637, "learning_rate": 3.854767802361342e-06, "loss": 0.4411, "step": 322 },
    { "epoch": 1.855433221637147, "grad_norm": 0.441859632730484, "learning_rate": 3.822191660643047e-06, "loss": 0.5007, "step": 323 },
    { "epoch": 1.8611775969363333, "grad_norm": 0.382299542427063, "learning_rate": 3.789668366742792e-06, "loss": 0.5045, "step": 324 },
    { "epoch": 1.8669219722355193, "grad_norm": 0.408326119184494, "learning_rate": 3.7571993799685675e-06, "loss": 0.4843, "step": 325 },
    { "epoch": 1.8726663475347056, "grad_norm": 0.42178499698638916, "learning_rate": 3.7247861571916183e-06, "loss": 0.4641, "step": 326 },
    { "epoch": 1.8784107228338918, "grad_norm": 0.4093269407749176, "learning_rate": 3.6924301527810856e-06, "loss": 0.4825, "step": 327 },
    { "epoch": 1.884155098133078, "grad_norm": 0.41595694422721863, "learning_rate": 3.6601328185387364e-06, "loss": 0.4981, "step": 328 },
    { "epoch": 1.8898994734322643, "grad_norm": 0.38833877444267273, "learning_rate": 3.6278956036338397e-06, "loss": 0.46, "step": 329 },
    { "epoch": 1.8956438487314504, "grad_norm": 0.4010329842567444, "learning_rate": 3.5957199545381216e-06, "loss": 0.4538, "step": 330 },
    { "epoch": 1.9013882240306366, "grad_norm": 0.42216378450393677, "learning_rate": 3.5636073149608824e-06, "loss": 0.477, "step": 331 },
    { "epoch": 1.907132599329823, "grad_norm": 0.39908266067504883, "learning_rate": 3.5315591257842e-06, "loss": 0.4271, "step": 332 },
    { "epoch": 1.912876974629009, "grad_norm": 0.41619524359703064, "learning_rate": 3.4995768249982975e-06, "loss": 0.4588, "step": 333 },
    { "epoch": 1.9186213499281952, "grad_norm": 0.3965836763381958, "learning_rate": 3.467661847637001e-06, "loss": 0.501, "step": 334 },
    { "epoch": 1.9243657252273816, "grad_norm": 0.3913739025592804, "learning_rate": 3.4358156257133644e-06, "loss": 0.5025, "step": 335 },
    { "epoch": 1.9301101005265677, "grad_norm": 0.42657947540283203, "learning_rate": 3.404039588155413e-06, "loss": 0.4434, "step": 336 },
    { "epoch": 1.9358544758257539, "grad_norm": 0.381879597902298, "learning_rate": 3.372335160742022e-06, "loss": 0.4315, "step": 337 },
    { "epoch": 1.9415988511249402, "grad_norm": 0.3740031123161316, "learning_rate": 3.3407037660389474e-06, "loss": 0.4561, "step": 338 },
    { "epoch": 1.9473432264241264, "grad_norm": 0.4027283489704132, "learning_rate": 3.3091468233349934e-06, "loss": 0.521, "step": 339 },
    { "epoch": 1.9530876017233125, "grad_norm": 0.4218166172504425, "learning_rate": 3.2776657485783357e-06, "loss": 0.4838, "step": 340 },
    { "epoch": 1.958831977022499, "grad_norm": 0.42057153582572937, "learning_rate": 3.246261954312979e-06, "loss": 0.4954, "step": 341 },
    { "epoch": 1.964576352321685, "grad_norm": 0.4057151675224304, "learning_rate": 3.2149368496153856e-06, "loss": 0.5009, "step": 342 },
    { "epoch": 1.9703207276208712, "grad_norm": 0.43289145827293396, "learning_rate": 3.1836918400312387e-06, "loss": 0.4539, "step": 343 },
    { "epoch": 1.9760651029200575, "grad_norm": 0.41700872778892517, "learning_rate": 3.152528327512395e-06, "loss": 0.4734, "step": 344 },
    { "epoch": 1.9818094782192437, "grad_norm": 0.4243963062763214, "learning_rate": 3.1214477103539585e-06, "loss": 0.4877, "step": 345 },
    { "epoch": 1.9875538535184298, "grad_norm": 0.4166891276836395, "learning_rate": 3.0904513831315563e-06, "loss": 0.5031, "step": 346 },
    { "epoch": 1.9932982288176162, "grad_norm": 0.41003891825675964, "learning_rate": 3.059540736638751e-06, "loss": 0.5046, "step": 347 },
    { "epoch": 1.9990426041168023, "grad_norm": 0.3934224843978882, "learning_rate": 3.028717157824652e-06, "loss": 0.4309, "step": 348 },
    { "epoch": 2.0047869794159885, "grad_norm": 1.5628243684768677, "learning_rate": 2.9979820297316652e-06, "loss": 0.7723, "step": 349 },
    { "epoch": 2.010531354715175, "grad_norm": 0.5319270491600037, "learning_rate": 2.9673367314334533e-06, "loss": 0.5022, "step": 350 },
    { "epoch": 2.016275730014361, "grad_norm": 0.5007623434066772, "learning_rate": 2.936782637973044e-06, "loss": 0.4484, "step": 351 },
    { "epoch": 2.022020105313547, "grad_norm": 0.4784458577632904, "learning_rate": 2.9063211203011443e-06, "loss": 0.454, "step": 352 },
    { "epoch": 2.0277644806127335, "grad_norm": 0.3986125886440277, "learning_rate": 2.8759535452146128e-06, "loss": 0.4522, "step": 353 },
    { "epoch": 2.0335088559119194, "grad_norm": 0.4448578953742981, "learning_rate": 2.8456812752951483e-06, "loss": 0.4543, "step": 354 },
    { "epoch": 2.039253231211106, "grad_norm": 0.46886104345321655, "learning_rate": 2.815505668848136e-06, "loss": 0.4183, "step": 355 },
    { "epoch": 2.044997606510292, "grad_norm": 0.438160240650177, "learning_rate": 2.785428079841709e-06, "loss": 0.4507, "step": 356 },
    { "epoch": 2.050741981809478, "grad_norm": 0.412517249584198, "learning_rate": 2.755449857845992e-06, "loss": 0.4479, "step": 357 },
    { "epoch": 2.0564863571086645, "grad_norm": 0.4198094308376312, "learning_rate": 2.725572347972558e-06, "loss": 0.4193, "step": 358 },
    { "epoch": 2.062230732407851, "grad_norm": 0.4086361825466156, "learning_rate": 2.6957968908140546e-06, "loss": 0.4264, "step": 359 },
    { "epoch": 2.0679751077070367, "grad_norm": 0.4513247013092041, "learning_rate": 2.666124822384071e-06, "loss": 0.4473, "step": 360 },
    { "epoch": 2.073719483006223, "grad_norm": 0.3780079483985901, "learning_rate": 2.636557474057173e-06, "loss": 0.4005, "step": 361 },
    { "epoch": 2.0794638583054095, "grad_norm": 0.3676167130470276, "learning_rate": 2.607096172509187e-06, "loss": 0.4392, "step": 362 },
    { "epoch": 2.0852082336045954, "grad_norm": 0.44044387340545654, "learning_rate": 2.5777422396576503e-06, "loss": 0.4911, "step": 363 },
    { "epoch": 2.0909526089037818, "grad_norm": 0.3587888479232788, "learning_rate": 2.5484969926025114e-06, "loss": 0.4374, "step": 364 },
    { "epoch": 2.096696984202968, "grad_norm": 0.41746485233306885, "learning_rate": 2.5193617435670244e-06, "loss": 0.4883, "step": 365 },
    { "epoch": 2.102441359502154, "grad_norm": 0.38463401794433594, "learning_rate": 2.4903377998388783e-06, "loss": 0.4396, "step": 366 },
    { "epoch": 2.1081857348013404, "grad_norm": 0.395751953125, "learning_rate": 2.461426463711535e-06, "loss": 0.4429, "step": 367 },
    { "epoch": 2.113930110100527, "grad_norm": 0.3884614109992981, "learning_rate": 2.4326290324257896e-06, "loss": 0.4227, "step": 368 },
    { "epoch": 2.1196744853997127, "grad_norm": 0.3690508306026459, "learning_rate": 2.403946798111576e-06, "loss": 0.4141, "step": 369 },
    { "epoch": 2.125418860698899, "grad_norm": 0.4512092173099518, "learning_rate": 2.37538104772998e-06, "loss": 0.4846, "step": 370 },
    { "epoch": 2.131163235998085,
|
"grad_norm": 0.341816782951355, |
|
"learning_rate": 2.3469330630154974e-06, |
|
"loss": 0.383, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 2.1369076112972714, |
|
"grad_norm": 0.39507344365119934, |
|
"learning_rate": 2.318604120418521e-06, |
|
"loss": 0.4662, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 2.1426519865964577, |
|
"grad_norm": 0.36571794748306274, |
|
"learning_rate": 2.2903954910480746e-06, |
|
"loss": 0.4621, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 2.1483963618956436, |
|
"grad_norm": 0.3868046700954437, |
|
"learning_rate": 2.2623084406147643e-06, |
|
"loss": 0.4306, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 2.15414073719483, |
|
"grad_norm": 0.37293747067451477, |
|
"learning_rate": 2.234344229374003e-06, |
|
"loss": 0.441, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 2.1598851124940164, |
|
"grad_norm": 0.38192519545555115, |
|
"learning_rate": 2.2065041120694487e-06, |
|
"loss": 0.442, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 2.1656294877932023, |
|
"grad_norm": 0.373929888010025, |
|
"learning_rate": 2.178789337876716e-06, |
|
"loss": 0.4448, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 2.1713738630923887, |
|
"grad_norm": 0.40156957507133484, |
|
"learning_rate": 2.151201150347318e-06, |
|
"loss": 0.4359, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 2.177118238391575, |
|
"grad_norm": 0.3525901138782501, |
|
"learning_rate": 2.123740787352872e-06, |
|
"loss": 0.4045, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 2.182862613690761, |
|
"grad_norm": 0.3916381001472473, |
|
"learning_rate": 2.096409481029556e-06, |
|
"loss": 0.4423, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 2.1886069889899473, |
|
"grad_norm": 0.3852902948856354, |
|
"learning_rate": 2.069208457722828e-06, |
|
"loss": 0.421, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 2.1943513642891337, |
|
"grad_norm": 0.40378549695014954, |
|
"learning_rate": 2.042138937932388e-06, |
|
"loss": 0.4254, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 2.2000957395883196, |
|
"grad_norm": 0.3902316987514496, |
|
"learning_rate": 2.015202136257432e-06, |
|
"loss": 0.4891, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 2.205840114887506, |
|
"grad_norm": 0.3631356954574585, |
|
"learning_rate": 1.988399261342135e-06, |
|
"loss": 0.4048, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 2.2115844901866923, |
|
"grad_norm": 0.3928806483745575, |
|
"learning_rate": 1.9617315158214363e-06, |
|
"loss": 0.4654, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 2.2173288654858783, |
|
"grad_norm": 0.3718385100364685, |
|
"learning_rate": 1.935200096267064e-06, |
|
"loss": 0.4473, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 2.2230732407850646, |
|
"grad_norm": 0.3905206024646759, |
|
"learning_rate": 1.908806193133855e-06, |
|
"loss": 0.4315, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 2.228817616084251, |
|
"grad_norm": 0.37699949741363525, |
|
"learning_rate": 1.8825509907063328e-06, |
|
"loss": 0.4605, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 2.234561991383437, |
|
"grad_norm": 0.4052666425704956, |
|
"learning_rate": 1.856435667045577e-06, |
|
"loss": 0.4561, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 2.2403063666826233, |
|
"grad_norm": 0.41954728960990906, |
|
"learning_rate": 1.8304613939363531e-06, |
|
"loss": 0.451, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 2.2460507419818097, |
|
"grad_norm": 0.3681614398956299, |
|
"learning_rate": 1.8046293368345485e-06, |
|
"loss": 0.4332, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 2.2517951172809956, |
|
"grad_norm": 0.3315896987915039, |
|
"learning_rate": 1.7789406548148647e-06, |
|
"loss": 0.4176, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 2.257539492580182, |
|
"grad_norm": 0.4171721935272217, |
|
"learning_rate": 1.7533965005188242e-06, |
|
"loss": 0.5205, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 2.2632838678793683, |
|
"grad_norm": 0.35167965292930603, |
|
"learning_rate": 1.7279980201030382e-06, |
|
"loss": 0.4135, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 2.2690282431785542, |
|
"grad_norm": 0.38858693838119507, |
|
"learning_rate": 1.7027463531877897e-06, |
|
"loss": 0.4432, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 2.2747726184777406, |
|
"grad_norm": 0.3728155791759491, |
|
"learning_rate": 1.677642632805892e-06, |
|
"loss": 0.4512, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 2.280516993776927, |
|
"grad_norm": 0.395333856344223, |
|
"learning_rate": 1.6526879853518558e-06, |
|
"loss": 0.4461, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 2.286261369076113, |
|
"grad_norm": 0.37841910123825073, |
|
"learning_rate": 1.6278835305313462e-06, |
|
"loss": 0.4539, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 2.2920057443752992, |
|
"grad_norm": 0.3504939675331116, |
|
"learning_rate": 1.6032303813109368e-06, |
|
"loss": 0.4478, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 2.297750119674485, |
|
"grad_norm": 0.383880615234375, |
|
"learning_rate": 1.578729643868181e-06, |
|
"loss": 0.4544, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.3034944949736715, |
|
"grad_norm": 0.39890047907829285, |
|
"learning_rate": 1.5543824175419691e-06, |
|
"loss": 0.4616, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 2.309238870272858, |
|
"grad_norm": 0.3388942778110504, |
|
"learning_rate": 1.5301897947832063e-06, |
|
"loss": 0.4472, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 2.3149832455720443, |
|
"grad_norm": 0.3660285770893097, |
|
"learning_rate": 1.5061528611057917e-06, |
|
"loss": 0.4071, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 2.32072762087123, |
|
"grad_norm": 0.38760945200920105, |
|
"learning_rate": 1.4822726950379207e-06, |
|
"loss": 0.4753, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 2.3264719961704166, |
|
"grad_norm": 0.36534708738327026, |
|
"learning_rate": 1.4585503680736756e-06, |
|
"loss": 0.4351, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.3322163714696025, |
|
"grad_norm": 0.3707476258277893, |
|
"learning_rate": 1.4349869446249664e-06, |
|
"loss": 0.43, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 2.337960746768789, |
|
"grad_norm": 0.36531203985214233, |
|
"learning_rate": 1.4115834819737534e-06, |
|
"loss": 0.3951, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 2.343705122067975, |
|
"grad_norm": 0.39617377519607544, |
|
"learning_rate": 1.3883410302246237e-06, |
|
"loss": 0.4387, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 2.349449497367161, |
|
"grad_norm": 0.36693063378334045, |
|
"learning_rate": 1.3652606322576606e-06, |
|
"loss": 0.4014, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 2.3551938726663475, |
|
"grad_norm": 0.3821011781692505, |
|
"learning_rate": 1.3423433236816563e-06, |
|
"loss": 0.4526, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.360938247965534, |
|
"grad_norm": 0.3760710060596466, |
|
"learning_rate": 1.3195901327876426e-06, |
|
"loss": 0.4402, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.36668262326472, |
|
"grad_norm": 0.3526499271392822, |
|
"learning_rate": 1.2970020805027555e-06, |
|
"loss": 0.4167, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.372426998563906, |
|
"grad_norm": 0.3931331932544708, |
|
"learning_rate": 1.2745801803444192e-06, |
|
"loss": 0.4686, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 2.3781713738630925, |
|
"grad_norm": 0.40180233120918274, |
|
"learning_rate": 1.25232543837488e-06, |
|
"loss": 0.4679, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 2.3839157491622784, |
|
"grad_norm": 0.35506054759025574, |
|
"learning_rate": 1.2302388531560515e-06, |
|
"loss": 0.4335, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.389660124461465, |
|
"grad_norm": 0.3469352126121521, |
|
"learning_rate": 1.2083214157047257e-06, |
|
"loss": 0.4631, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 2.395404499760651, |
|
"grad_norm": 0.3637499213218689, |
|
"learning_rate": 1.186574109448091e-06, |
|
"loss": 0.4489, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 2.401148875059837, |
|
"grad_norm": 0.3483814597129822, |
|
"learning_rate": 1.164997910179615e-06, |
|
"loss": 0.4659, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 2.4068932503590235, |
|
"grad_norm": 0.38259443640708923, |
|
"learning_rate": 1.1435937860152579e-06, |
|
"loss": 0.453, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 2.41263762565821, |
|
"grad_norm": 0.34772536158561707, |
|
"learning_rate": 1.1223626973500395e-06, |
|
"loss": 0.4076, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.4183820009573957, |
|
"grad_norm": 0.36162224411964417, |
|
"learning_rate": 1.1013055968149343e-06, |
|
"loss": 0.4482, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 2.424126376256582, |
|
"grad_norm": 0.3481464385986328, |
|
"learning_rate": 1.0804234292341426e-06, |
|
"loss": 0.4694, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 2.4298707515557685, |
|
"grad_norm": 0.3406570553779602, |
|
"learning_rate": 1.0597171315826805e-06, |
|
"loss": 0.4026, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 2.4356151268549544, |
|
"grad_norm": 0.359840452671051, |
|
"learning_rate": 1.0391876329443534e-06, |
|
"loss": 0.4212, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 2.4413595021541408, |
|
"grad_norm": 0.41036349534988403, |
|
"learning_rate": 1.0188358544700583e-06, |
|
"loss": 0.4463, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.447103877453327, |
|
"grad_norm": 0.3857036828994751, |
|
"learning_rate": 9.986627093364542e-07, |
|
"loss": 0.479, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 2.452848252752513, |
|
"grad_norm": 0.3274599611759186, |
|
"learning_rate": 9.786691027049893e-07, |
|
"loss": 0.3942, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 2.4585926280516994, |
|
"grad_norm": 0.41199931502342224, |
|
"learning_rate": 9.588559316812906e-07, |
|
"loss": 0.4954, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 2.4643370033508853, |
|
"grad_norm": 0.36387866735458374, |
|
"learning_rate": 9.392240852749007e-07, |
|
"loss": 0.4519, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 2.4700813786500717, |
|
"grad_norm": 0.3550173342227936, |
|
"learning_rate": 9.197744443594003e-07, |
|
"loss": 0.4146, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.475825753949258, |
|
"grad_norm": 0.36893606185913086, |
|
"learning_rate": 9.005078816328772e-07, |
|
"loss": 0.4837, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 2.4815701292484444, |
|
"grad_norm": 0.3411610424518585, |
|
"learning_rate": 8.814252615787661e-07, |
|
"loss": 0.4311, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 2.4873145045476304, |
|
"grad_norm": 0.3465381860733032, |
|
"learning_rate": 8.625274404270662e-07, |
|
"loss": 0.4731, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 2.4930588798468167, |
|
"grad_norm": 0.3729943633079529, |
|
"learning_rate": 8.438152661159165e-07, |
|
"loss": 0.4194, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 2.4988032551460027, |
|
"grad_norm": 0.4155563414096832, |
|
"learning_rate": 8.252895782535569e-07, |
|
"loss": 0.4698, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.504547630445189, |
|
"grad_norm": 0.3166390061378479, |
|
"learning_rate": 8.069512080806441e-07, |
|
"loss": 0.392, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 2.5102920057443754, |
|
"grad_norm": 0.3697630763053894, |
|
"learning_rate": 7.88800978432967e-07, |
|
"loss": 0.4642, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 2.5160363810435618, |
|
"grad_norm": 0.4319329559803009, |
|
"learning_rate": 7.708397037045129e-07, |
|
"loss": 0.4768, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 2.5217807563427477, |
|
"grad_norm": 0.32879865169525146, |
|
"learning_rate": 7.530681898109393e-07, |
|
"loss": 0.4141, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 2.527525131641934, |
|
"grad_norm": 0.3696901798248291, |
|
"learning_rate": 7.35487234153402e-07, |
|
"loss": 0.4802, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.53326950694112, |
|
"grad_norm": 0.3675316274166107, |
|
"learning_rate": 7.180976255827809e-07, |
|
"loss": 0.4756, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 2.5390138822403063, |
|
"grad_norm": 0.33102041482925415, |
|
"learning_rate": 7.009001443642843e-07, |
|
"loss": 0.3936, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 2.5447582575394927, |
|
"grad_norm": 0.32732781767845154, |
|
"learning_rate": 6.838955621424404e-07, |
|
"loss": 0.3896, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 2.550502632838679, |
|
"grad_norm": 0.3652030825614929, |
|
"learning_rate": 6.67084641906468e-07, |
|
"loss": 0.4773, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 2.556247008137865, |
|
"grad_norm": 0.3629269003868103, |
|
"learning_rate": 6.50468137956049e-07, |
|
"loss": 0.4434, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.5619913834370514, |
|
"grad_norm": 0.34360983967781067, |
|
"learning_rate": 6.340467958674762e-07, |
|
"loss": 0.4265, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 2.5677357587362373, |
|
"grad_norm": 0.3844148814678192, |
|
"learning_rate": 6.178213524602061e-07, |
|
"loss": 0.4666, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 2.5734801340354236, |
|
"grad_norm": 0.36176225543022156, |
|
"learning_rate": 6.017925357637932e-07, |
|
"loss": 0.4188, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 2.57922450933461, |
|
"grad_norm": 0.35700544714927673, |
|
"learning_rate": 5.859610649852249e-07, |
|
"loss": 0.426, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 2.584968884633796, |
|
"grad_norm": 0.3662126958370209, |
|
"learning_rate": 5.703276504766514e-07, |
|
"loss": 0.4135, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.5907132599329823, |
|
"grad_norm": 0.33082348108291626, |
|
"learning_rate": 5.548929937035147e-07, |
|
"loss": 0.3923, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 2.5964576352321687, |
|
"grad_norm": 0.351136714220047, |
|
"learning_rate": 5.396577872130676e-07, |
|
"loss": 0.4475, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 2.6022020105313546, |
|
"grad_norm": 0.3499198853969574, |
|
"learning_rate": 5.246227146033089e-07, |
|
"loss": 0.4704, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 2.607946385830541, |
|
"grad_norm": 0.3229888677597046, |
|
"learning_rate": 5.097884504922996e-07, |
|
"loss": 0.4099, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 2.6136907611297273, |
|
"grad_norm": 0.37059321999549866, |
|
"learning_rate": 4.951556604879049e-07, |
|
"loss": 0.5134, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.6194351364289132, |
|
"grad_norm": 0.35366615653038025, |
|
"learning_rate": 4.807250011579168e-07, |
|
"loss": 0.442, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 2.6251795117280996, |
|
"grad_norm": 0.33588507771492004, |
|
"learning_rate": 4.6649712000060297e-07, |
|
"loss": 0.3842, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 2.6309238870272855, |
|
"grad_norm": 0.33627060055732727, |
|
"learning_rate": 4.5247265541564836e-07, |
|
"loss": 0.4408, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 2.636668262326472, |
|
"grad_norm": 0.3534417450428009, |
|
"learning_rate": 4.386522366755169e-07, |
|
"loss": 0.4385, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 2.6424126376256583, |
|
"grad_norm": 0.3756772577762604, |
|
"learning_rate": 4.250364838972065e-07, |
|
"loss": 0.4684, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.6481570129248446, |
|
"grad_norm": 0.33255767822265625, |
|
"learning_rate": 4.116260080144352e-07, |
|
"loss": 0.4493, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 2.6539013882240305, |
|
"grad_norm": 0.3787069618701935, |
|
"learning_rate": 3.98421410750221e-07, |
|
"loss": 0.4556, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 2.659645763523217, |
|
"grad_norm": 0.35434430837631226, |
|
"learning_rate": 3.854232845898859e-07, |
|
"loss": 0.441, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 2.665390138822403, |
|
"grad_norm": 0.331163614988327, |
|
"learning_rate": 3.7263221275447125e-07, |
|
"loss": 0.4376, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 2.671134514121589, |
|
"grad_norm": 0.3512961268424988, |
|
"learning_rate": 3.60048769174568e-07, |
|
"loss": 0.438, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.6768788894207756, |
|
"grad_norm": 0.3708810806274414, |
|
"learning_rate": 3.4767351846456744e-07, |
|
"loss": 0.4711, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 2.682623264719962, |
|
"grad_norm": 0.35665786266326904, |
|
"learning_rate": 3.355070158973212e-07, |
|
"loss": 0.4434, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 2.688367640019148, |
|
"grad_norm": 0.3402157723903656, |
|
"learning_rate": 3.235498073792342e-07, |
|
"loss": 0.4436, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 2.694112015318334, |
|
"grad_norm": 0.3638489842414856, |
|
"learning_rate": 3.118024294257621e-07, |
|
"loss": 0.4612, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 2.69985639061752, |
|
"grad_norm": 0.3516465425491333, |
|
"learning_rate": 3.002654091373453e-07, |
|
"loss": 0.4177, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.7056007659167065, |
|
"grad_norm": 0.3707068860530853, |
|
"learning_rate": 2.889392641757527e-07, |
|
"loss": 0.4764, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 2.711345141215893, |
|
"grad_norm": 0.3246111571788788, |
|
"learning_rate": 2.778245027408566e-07, |
|
"loss": 0.4262, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 2.7170895165150792, |
|
"grad_norm": 0.37666356563568115, |
|
"learning_rate": 2.669216235478295e-07, |
|
"loss": 0.4879, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 2.722833891814265, |
|
"grad_norm": 0.3712378144264221, |
|
"learning_rate": 2.562311158047692e-07, |
|
"loss": 0.4568, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 2.7285782671134515, |
|
"grad_norm": 0.35843536257743835, |
|
"learning_rate": 2.45753459190744e-07, |
|
"loss": 0.4473, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.7343226424126374, |
|
"grad_norm": 0.36265286803245544, |
|
"learning_rate": 2.354891238342738e-07, |
|
"loss": 0.4247, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 2.740067017711824, |
|
"grad_norm": 0.35885879397392273, |
|
"learning_rate": 2.254385702922318e-07, |
|
"loss": 0.4544, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 2.74581139301101, |
|
"grad_norm": 0.3318557143211365, |
|
"learning_rate": 2.1560224952918373e-07, |
|
"loss": 0.3918, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 2.751555768310196, |
|
"grad_norm": 0.3384019732475281, |
|
"learning_rate": 2.0598060289714893e-07, |
|
"loss": 0.3935, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 2.7573001436093825, |
|
"grad_norm": 0.37354743480682373, |
|
"learning_rate": 1.9657406211579966e-07, |
|
"loss": 0.477, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.763044518908569, |
|
"grad_norm": 0.3306199312210083, |
|
"learning_rate": 1.8738304925308926e-07, |
|
"loss": 0.4606, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 2.7687888942077548, |
|
"grad_norm": 0.337941974401474, |
|
"learning_rate": 1.7840797670631572e-07, |
|
"loss": 0.4581, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 2.774533269506941, |
|
"grad_norm": 0.34849002957344055, |
|
"learning_rate": 1.6964924718361364e-07, |
|
"loss": 0.4416, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 2.7802776448061275, |
|
"grad_norm": 0.3213178515434265, |
|
"learning_rate": 1.6110725368589041e-07, |
|
"loss": 0.4177, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 2.7860220201053134, |
|
"grad_norm": 0.3672527074813843, |
|
"learning_rate": 1.5278237948918585e-07, |
|
"loss": 0.4831, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.7917663954045, |
|
"grad_norm": 0.3290295898914337, |
|
"learning_rate": 1.4467499812748143e-07, |
|
"loss": 0.4421, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 2.7975107707036857, |
|
"grad_norm": 0.32079824805259705, |
|
"learning_rate": 1.3678547337593494e-07, |
|
"loss": 0.4536, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 2.803255146002872, |
|
"grad_norm": 0.34473419189453125, |
|
"learning_rate": 1.2911415923456017e-07, |
|
"loss": 0.4474, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 2.8089995213020584, |
|
"grad_norm": 0.34892538189888, |
|
"learning_rate": 1.2166139991234227e-07, |
|
"loss": 0.4153, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 2.814743896601245, |
|
"grad_norm": 0.35367730259895325, |
|
"learning_rate": 1.1442752981179527e-07, |
|
"loss": 0.4633, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.8204882719004307, |
|
"grad_norm": 0.34787774085998535, |
|
"learning_rate": 1.0741287351395402e-07, |
|
"loss": 0.4833, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 2.826232647199617, |
|
"grad_norm": 0.34205448627471924, |
|
"learning_rate": 1.0061774576381411e-07, |
|
"loss": 0.443, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 2.831977022498803, |
|
"grad_norm": 0.3662581741809845, |
|
"learning_rate": 9.404245145620717e-08, |
|
"loss": 0.4819, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 2.8377213977979894, |
|
"grad_norm": 0.3398209512233734, |
|
"learning_rate": 8.768728562211948e-08, |
|
"loss": 0.3826, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 2.8434657730971757, |
|
"grad_norm": 0.3441368341445923, |
|
"learning_rate": 8.155253341545655e-08, |
|
"loss": 0.4825, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.849210148396362, |
|
"grad_norm": 0.36746805906295776, |
|
"learning_rate": 7.563847010024716e-08, |
|
"loss": 0.4402, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 2.854954523695548, |
|
"grad_norm": 0.3405812978744507, |
|
"learning_rate": 6.994536103829164e-08, |
|
"loss": 0.4468, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 2.8606988989947344, |
|
"grad_norm": 0.33339643478393555, |
|
"learning_rate": 6.447346167725688e-08, |
|
"loss": 0.4059, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 2.8664432742939203, |
|
"grad_norm": 0.3409021496772766, |
|
"learning_rate": 5.9223017539213335e-08, |
|
"loss": 0.4844, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 2.8721876495931067, |
|
"grad_norm": 0.30741554498672485, |
|
"learning_rate": 5.4194264209617705e-08, |
|
"loss": 0.4113, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.877932024892293, |
|
"grad_norm": 0.34918904304504395, |
|
"learning_rate": 4.9387427326745287e-08, |
|
"loss": 0.4648, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 2.8836764001914794, |
|
"grad_norm": 0.3559444844722748, |
|
"learning_rate": 4.4802722571561374e-08, |
|
"loss": 0.4712, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 2.8894207754906653, |
|
"grad_norm": 0.3617500960826874, |
|
"learning_rate": 4.044035565804793e-08, |
|
"loss": 0.4212, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 2.8951651507898517, |
|
"grad_norm": 0.3423933982849121, |
|
"learning_rate": 3.6300522323969855e-08, |
|
"loss": 0.3823, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 2.9009095260890376, |
|
"grad_norm": 0.32970568537712097, |
|
"learning_rate": 3.2383408322095856e-08, |
|
"loss": 0.483, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.906653901388224, |
|
"grad_norm": 0.32338789105415344, |
|
"learning_rate": 2.8689189411859607e-08, |
|
"loss": 0.4111, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 2.9123982766874104, |
|
"grad_norm": 0.36587268114089966, |
|
"learning_rate": 2.5218031351478268e-08, |
|
"loss": 0.5196, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 2.9181426519865967, |
|
"grad_norm": 0.372019499540329, |
|
"learning_rate": 2.1970089890509527e-08, |
|
"loss": 0.4153, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 2.9238870272857826, |
|
"grad_norm": 0.3676818907260895, |
|
"learning_rate": 1.8945510762868325e-08, |
|
"loss": 0.445, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 2.929631402584969, |
|
"grad_norm": 0.33755603432655334, |
|
"learning_rate": 1.614442968028429e-08, |
|
"loss": 0.4542, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.935375777884155, |
|
"grad_norm": 0.3397558629512787, |
|
"learning_rate": 1.3566972326214956e-08, |
|
"loss": 0.4402, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 2.9411201531833413, |
|
"grad_norm": 0.3535856604576111, |
|
"learning_rate": 1.1213254350202486e-08, |
|
"loss": 0.4235, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 2.9468645284825277, |
|
"grad_norm": 0.324677973985672, |
|
"learning_rate": 9.083381362690603e-09, |
|
"loss": 0.4315, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 2.9526089037817136, |
|
"grad_norm": 0.3455667495727539, |
|
"learning_rate": 7.177448930279496e-09, |
|
"loss": 0.4506, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 2.9583532790809, |
|
"grad_norm": 0.35920631885528564, |
|
"learning_rate": 5.495542571443135e-09, |
|
"loss": 0.4154, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.9640976543800863, |
|
"grad_norm": 0.3734237849712372, |
|
"learning_rate": 4.037737752686788e-09, |
|
"loss": 0.4393, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 2.9698420296792722, |
|
"grad_norm": 0.34162041544914246, |
|
"learning_rate": 2.8040998851674996e-09, |
|
"loss": 0.4448, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 2.9755864049784586, |
|
"grad_norm": 0.3412844240665436, |
|
"learning_rate": 1.7946843217514498e-09, |
|
"loss": 0.4195, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 2.981330780277645, |
|
"grad_norm": 0.35345590114593506, |
|
"learning_rate": 1.009536354537044e-09, |
|
"loss": 0.3924, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 2.987075155576831, |
|
"grad_norm": 0.36779072880744934, |
|
"learning_rate": 4.486912128182086e-10, |
|
"loss": 0.5045, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.9928195308760173, |
|
"grad_norm": 0.3303414583206177, |
|
"learning_rate": 1.1217406150676457e-10, |
|
"loss": 0.4413, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 2.998563906175203, |
|
"grad_norm": 0.33037903904914856, |
|
"learning_rate": 0.0, |
|
"loss": 0.4208, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 2.998563906175203, |
|
"step": 522, |
|
"total_flos": 538113478688768.0, |
|
"train_loss": 0.5088067185490525, |
|
"train_runtime": 25669.0251, |
|
"train_samples_per_second": 1.953, |
|
"train_steps_per_second": 0.02 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 522, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 538113478688768.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
