{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9994672349493873,
  "eval_steps": 500,
  "global_step": 938,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 4.4688, "step": 1},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 5.7051, "step": 2},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 4.6101, "step": 3},
    {"epoch": 0.0, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 5.1161, "step": 4},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 4.7314, "step": 5},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 5.4174, "step": 6},
    {"epoch": 0.01, "grad_norm": 0.0, "learning_rate": 0.0, "loss": 5.3351, "step": 7},
    {"epoch": 0.01, "grad_norm": 267.06263341170535, "learning_rate": 2.1276595744680852e-07, "loss": 4.3761, "step": 8},
    {"epoch": 0.01, "grad_norm": 256.6399066161345, "learning_rate": 4.2553191489361704e-07, "loss": 4.911, "step": 9},
    {"epoch": 0.01, "grad_norm": 259.8979037908876, "learning_rate": 6.382978723404255e-07, "loss": 4.5067, "step": 10},
    {"epoch": 0.01, "grad_norm": 206.34210154966675, "learning_rate": 8.510638297872341e-07, "loss": 4.1617, "step": 11},
    {"epoch": 0.01, "grad_norm": 206.34210154966675, "learning_rate": 8.510638297872341e-07, "loss": 4.7024, "step": 12},
    {"epoch": 0.01, "grad_norm": 104.25967551689651, "learning_rate": 1.0638297872340427e-06, "loss": 2.8783, "step": 13},
    {"epoch": 0.01, "grad_norm": 104.25967551689651, "learning_rate": 1.0638297872340427e-06, "loss": 3.6247, "step": 14},
    {"epoch": 0.02, "grad_norm": 301.56418557125875, "learning_rate": 1.276595744680851e-06, "loss": 3.8274, "step": 15},
    {"epoch": 0.02, "grad_norm": 158.0102677208041, "learning_rate": 1.4893617021276596e-06, "loss": 2.5054, "step": 16},
    {"epoch": 0.02, "grad_norm": 144.94261605863426, "learning_rate": 1.7021276595744682e-06, "loss": 2.6479, "step": 17},
    {"epoch": 0.02, "grad_norm": 77.83778037631348, "learning_rate": 1.9148936170212767e-06, "loss": 1.7461, "step": 18},
    {"epoch": 0.02, "grad_norm": 110.09097679168819, "learning_rate": 2.1276595744680853e-06, "loss": 2.005, "step": 19},
    {"epoch": 0.02, "grad_norm": 64.67189678957659, "learning_rate": 2.340425531914894e-06, "loss": 1.5011, "step": 20},
    {"epoch": 0.02, "grad_norm": 85.70086135587184, "learning_rate": 2.553191489361702e-06, "loss": 1.4231, "step": 21},
    {"epoch": 0.02, "grad_norm": 51.55499709016579, "learning_rate": 2.765957446808511e-06, "loss": 1.3443, "step": 22},
    {"epoch": 0.02, "grad_norm": 39.458031493922036, "learning_rate": 2.978723404255319e-06, "loss": 1.1782, "step": 23},
    {"epoch": 0.03, "grad_norm": 30.718044498953375, "learning_rate": 3.191489361702128e-06, "loss": 1.0572, "step": 24},
    {"epoch": 0.03, "grad_norm": 54.03307642588333, "learning_rate": 3.4042553191489363e-06, "loss": 1.0557, "step": 25},
    {"epoch": 0.03, "grad_norm": 45.15061216435745, "learning_rate": 3.6170212765957453e-06, "loss": 0.9468, "step": 26},
    {"epoch": 0.03, "grad_norm": 85.20725035566016, "learning_rate": 3.8297872340425535e-06, "loss": 1.1611, "step": 27},
    {"epoch": 0.03, "grad_norm": 61.03528945297965, "learning_rate": 4.042553191489362e-06, "loss": 0.9764, "step": 28},
    {"epoch": 0.03, "grad_norm": 32.57984313596529, "learning_rate": 4.255319148936171e-06, "loss": 0.9542, "step": 29},
    {"epoch": 0.03, "grad_norm": 46.71878018713139, "learning_rate": 4.468085106382979e-06, "loss": 1.0575, "step": 30},
    {"epoch": 0.03, "grad_norm": 28.228382153829948, "learning_rate": 4.680851063829788e-06, "loss": 0.8927, "step": 31},
    {"epoch": 0.03, "grad_norm": 32.305084533281175, "learning_rate": 4.893617021276596e-06, "loss": 0.9158, "step": 32},
    {"epoch": 0.04, "grad_norm": 38.80822759339413, "learning_rate": 5.106382978723404e-06, "loss": 1.0127, "step": 33},
    {"epoch": 0.04, "grad_norm": 16.765291089343936, "learning_rate": 5.319148936170213e-06, "loss": 0.8579, "step": 34},
    {"epoch": 0.04, "grad_norm": 46.21887734855352, "learning_rate": 5.531914893617022e-06, "loss": 1.0595, "step": 35},
    {"epoch": 0.04, "grad_norm": 33.51542422627954, "learning_rate": 5.744680851063831e-06, "loss": 0.7873, "step": 36},
    {"epoch": 0.04, "grad_norm": 20.62520698421828, "learning_rate": 5.957446808510638e-06, "loss": 0.7463, "step": 37},
    {"epoch": 0.04, "grad_norm": 32.86540760673144, "learning_rate": 6.170212765957447e-06, "loss": 0.9393, "step": 38},
    {"epoch": 0.04, "grad_norm": 16.78339589062748, "learning_rate": 6.382978723404256e-06, "loss": 0.7036, "step": 39},
    {"epoch": 0.04, "grad_norm": 20.638888081681362, "learning_rate": 6.595744680851064e-06, "loss": 0.8178, "step": 40},
    {"epoch": 0.04, "grad_norm": 24.957942318493615, "learning_rate": 6.808510638297873e-06, "loss": 0.8192, "step": 41},
    {"epoch": 0.04, "grad_norm": 18.753926303171866, "learning_rate": 7.021276595744682e-06, "loss": 0.8053, "step": 42},
    {"epoch": 0.05, "grad_norm": 20.380738488767815, "learning_rate": 7.234042553191491e-06, "loss": 0.7775, "step": 43},
    {"epoch": 0.05, "grad_norm": 15.358291004654351, "learning_rate": 7.446808510638298e-06, "loss": 0.768, "step": 44},
    {"epoch": 0.05, "grad_norm": 28.01853531392082, "learning_rate": 7.659574468085107e-06, "loss": 0.6495, "step": 45},
    {"epoch": 0.05, "grad_norm": 90.12246442870764, "learning_rate": 7.872340425531916e-06, "loss": 1.1254, "step": 46},
    {"epoch": 0.05, "grad_norm": 24.048650390369733, "learning_rate": 8.085106382978723e-06, "loss": 0.8769, "step": 47},
    {"epoch": 0.05, "grad_norm": 79.51846666706089, "learning_rate": 8.297872340425532e-06, "loss": 1.2554, "step": 48},
    {"epoch": 0.05, "grad_norm": 20.341370812622667, "learning_rate": 8.510638297872341e-06, "loss": 0.7153, "step": 49},
    {"epoch": 0.05, "grad_norm": 48.50233018085147, "learning_rate": 8.72340425531915e-06, "loss": 1.0358, "step": 50},
    {"epoch": 0.05, "grad_norm": 27.71805876780665, "learning_rate": 8.936170212765958e-06, "loss": 0.7539, "step": 51},
    {"epoch": 0.06, "grad_norm": 44.56567439721268, "learning_rate": 9.148936170212767e-06, "loss": 1.0363, "step": 52},
    {"epoch": 0.06, "grad_norm": 43.96614589042312, "learning_rate": 9.361702127659576e-06, "loss": 1.0823, "step": 53},
    {"epoch": 0.06, "grad_norm": 36.336438544851525, "learning_rate": 9.574468085106385e-06, "loss": 0.9189, "step": 54},
    {"epoch": 0.06, "grad_norm": 20.360154033813146, "learning_rate": 9.787234042553192e-06, "loss": 0.7138, "step": 55},
    {"epoch": 0.06, "grad_norm": 16.415639113466074, "learning_rate": 1e-05, "loss": 0.8165, "step": 56},
    {"epoch": 0.06, "grad_norm": 17.154150115372023, "learning_rate": 1.0212765957446808e-05, "loss": 0.8003, "step": 57},
    {"epoch": 0.06, "grad_norm": 23.485978008365016, "learning_rate": 1.0425531914893619e-05, "loss": 0.8017, "step": 58},
    {"epoch": 0.06, "grad_norm": 18.921319860871982, "learning_rate": 1.0638297872340426e-05, "loss": 0.6803, "step": 59},
    {"epoch": 0.06, "grad_norm": 28.458932361446607, "learning_rate": 1.0851063829787233e-05, "loss": 0.9205, "step": 60},
    {"epoch": 0.06, "grad_norm": 16.224593364238817, "learning_rate": 1.1063829787234044e-05, "loss": 0.7286, "step": 61},
    {"epoch": 0.07, "grad_norm": 30.78498419558807, "learning_rate": 1.1276595744680851e-05, "loss": 0.9564, "step": 62},
    {"epoch": 0.07, "grad_norm": 20.29389328745947, "learning_rate": 1.1489361702127662e-05, "loss": 0.8442, "step": 63},
    {"epoch": 0.07, "grad_norm": 18.23986171950765, "learning_rate": 1.170212765957447e-05, "loss": 0.7971, "step": 64},
    {"epoch": 0.07, "grad_norm": 13.607742168916245, "learning_rate": 1.1914893617021277e-05, "loss": 0.7479, "step": 65},
    {"epoch": 0.07, "grad_norm": 15.589517783052612, "learning_rate": 1.2127659574468087e-05, "loss": 0.6971, "step": 66},
    {"epoch": 0.07, "grad_norm": 11.730568689593209, "learning_rate": 1.2340425531914895e-05, "loss": 0.6136, "step": 67},
    {"epoch": 0.07, "grad_norm": 17.919191925814953, "learning_rate": 1.2553191489361702e-05, "loss": 0.6786, "step": 68},
    {"epoch": 0.07, "grad_norm": 46.93984753189794, "learning_rate": 1.2765957446808513e-05, "loss": 1.1316, "step": 69},
    {"epoch": 0.07, "grad_norm": 32.097199934080784, "learning_rate": 1.297872340425532e-05, "loss": 0.8888, "step": 70},
    {"epoch": 0.08, "grad_norm": 23.37535729938417, "learning_rate": 1.3191489361702127e-05, "loss": 0.7891, "step": 71},
    {"epoch": 0.08, "grad_norm": 16.127458600501658, "learning_rate": 1.3404255319148938e-05, "loss": 0.5976, "step": 72},
    {"epoch": 0.08, "grad_norm": 13.056670968499086, "learning_rate": 1.3617021276595745e-05, "loss": 0.6602, "step": 73},
    {"epoch": 0.08, "grad_norm": 18.80577219024569, "learning_rate": 1.3829787234042556e-05, "loss": 0.7343, "step": 74},
    {"epoch": 0.08, "grad_norm": 18.0606093200878, "learning_rate": 1.4042553191489363e-05, "loss": 0.7229, "step": 75},
    {"epoch": 0.08, "grad_norm": 13.62137898397275, "learning_rate": 1.425531914893617e-05, "loss": 0.7897, "step": 76},
    {"epoch": 0.08, "grad_norm": 10.783139067640777, "learning_rate": 1.4468085106382981e-05, "loss": 0.5573, "step": 77},
    {"epoch": 0.08, "grad_norm": 28.888686230080594, "learning_rate": 1.4680851063829789e-05, "loss": 0.7468, "step": 78},
    {"epoch": 0.08, "grad_norm": 24.696181606034965, "learning_rate": 1.4893617021276596e-05, "loss": 0.6625, "step": 79},
    {"epoch": 0.09, "grad_norm": 32.68489697451064, "learning_rate": 1.5106382978723407e-05, "loss": 1.0193, "step": 80},
    {"epoch": 0.09, "grad_norm": 20.801300816024295, "learning_rate": 1.5319148936170214e-05, "loss": 0.6352, "step": 81},
    {"epoch": 0.09, "grad_norm": 10.55513433584252, "learning_rate": 1.5531914893617023e-05, "loss": 0.7007, "step": 82},
    {"epoch": 0.09, "grad_norm": 15.716140126443527, "learning_rate": 1.5744680851063832e-05, "loss": 0.7187, "step": 83},
    {"epoch": 0.09, "grad_norm": 21.85726232694579, "learning_rate": 1.595744680851064e-05, "loss": 0.7349, "step": 84},
    {"epoch": 0.09, "grad_norm": 20.964587712026038, "learning_rate": 1.6170212765957446e-05, "loss": 0.7143, "step": 85},
    {"epoch": 0.09, "grad_norm": 8.250030396732411, "learning_rate": 1.6382978723404255e-05, "loss": 0.6892, "step": 86},
    {"epoch": 0.09, "grad_norm": 21.20006063299826, "learning_rate": 1.6595744680851064e-05, "loss": 0.8109, "step": 87},
    {"epoch": 0.09, "grad_norm": 19.09608912821583, "learning_rate": 1.6808510638297873e-05, "loss": 0.8082, "step": 88},
    {"epoch": 0.09, "grad_norm": 20.52502129407863, "learning_rate": 1.7021276595744682e-05, "loss": 0.6783, "step": 89},
    {"epoch": 0.1, "grad_norm": 39.20021747253875, "learning_rate": 1.723404255319149e-05, "loss": 0.9711, "step": 90},
    {"epoch": 0.1, "grad_norm": 31.095085040142717, "learning_rate": 1.74468085106383e-05, "loss": 0.8283, "step": 91},
    {"epoch": 0.1, "grad_norm": 37.26207899740689, "learning_rate": 1.765957446808511e-05, "loss": 1.0039, "step": 92},
    {"epoch": 0.1, "grad_norm": 16.273798667976465, "learning_rate": 1.7872340425531915e-05, "loss": 0.6814, "step": 93},
    {"epoch": 0.1, "grad_norm": 32.374520273410155, "learning_rate": 1.8085106382978724e-05, "loss": 0.9674, "step": 94},
    {"epoch": 0.1, "grad_norm": 34.748891951713134, "learning_rate": 1.8297872340425533e-05, "loss": 1.1996, "step": 95},
    {"epoch": 0.1, "grad_norm": 26.013510708052237, "learning_rate": 1.8510638297872342e-05, "loss": 0.9372, "step": 96},
    {"epoch": 0.1, "grad_norm": 19.99677186530767, "learning_rate": 1.872340425531915e-05, "loss": 0.8718, "step": 97},
    {"epoch": 0.1, "grad_norm": 16.62199424485097, "learning_rate": 1.893617021276596e-05, "loss": 0.9384, "step": 98},
    {"epoch": 0.11, "grad_norm": 20.111532243609883, "learning_rate": 1.914893617021277e-05, "loss": 0.9813, "step": 99},
    {"epoch": 0.11, "grad_norm": 15.191733826484391, "learning_rate": 1.9361702127659575e-05, "loss": 0.8069, "step": 100},
    {"epoch": 0.11, "grad_norm": 23.926673285223416, "learning_rate": 1.9574468085106384e-05, "loss": 0.8308, "step": 101},
    {"epoch": 0.11, "grad_norm": 23.218285916806728, "learning_rate": 1.9787234042553193e-05, "loss": 0.9754, "step": 102},
    {"epoch": 0.11, "grad_norm": 17.76104315857583, "learning_rate": 2e-05, "loss": 0.8006, "step": 103},
    {"epoch": 0.11, "grad_norm": 20.836820707421783, "learning_rate": 1.9999930723752516e-05, "loss": 0.8543, "step": 104},
    {"epoch": 0.11, "grad_norm": 17.0438780130028, "learning_rate": 1.9999722895969904e-05, "loss": 0.8443, "step": 105},
    {"epoch": 0.11, "grad_norm": 22.36760800612908, "learning_rate": 1.9999376519531668e-05, "loss": 0.8841, "step": 106},
    {"epoch": 0.11, "grad_norm": 11.874378681555248, "learning_rate": 1.999889159923694e-05, "loss": 0.6501, "step": 107},
    {"epoch": 0.12, "grad_norm": 17.245286092714633, "learning_rate": 1.9998268141804412e-05, "loss": 0.8119, "step": 108},
    {"epoch": 0.12, "grad_norm": 16.693402688517093, "learning_rate": 1.9997506155872246e-05, "loss": 0.7683, "step": 109},
    {"epoch": 0.12, "grad_norm": 18.100428165378997, "learning_rate": 1.999660565199794e-05, "loss": 0.8259, "step": 110},
    {"epoch": 0.12, "grad_norm": 10.547692312079072, "learning_rate": 1.9995566642658208e-05, "loss": 0.6651, "step": 111},
    {"epoch": 0.12, "grad_norm": 10.19017430503209, "learning_rate": 1.9994389142248775e-05, "loss": 0.7619, "step": 112},
    {"epoch": 0.12, "grad_norm": 13.012636488876366, "learning_rate": 1.999307316708421e-05, "loss": 0.6931, "step": 113},
    {"epoch": 0.12, "grad_norm": 13.672739554862506, "learning_rate": 1.9991618735397674e-05, "loss": 0.7014, "step": 114},
    {"epoch": 0.12, "grad_norm": 11.268690129927412, "learning_rate": 1.9990025867340683e-05, "loss": 0.7041, "step": 115},
    {"epoch": 0.12, "grad_norm": 13.835706135922516, "learning_rate": 1.998829458498282e-05, "loss": 0.7568, "step": 116},
    {"epoch": 0.12, "grad_norm": 12.580781013528279, "learning_rate": 1.998642491231143e-05, "loss": 0.7155, "step": 117},
    {"epoch": 0.13, "grad_norm": 11.88807897059113, "learning_rate": 1.9984416875231303e-05, "loss": 0.7374, "step": 118},
    {"epoch": 0.13, "grad_norm": 15.58619342416967, "learning_rate": 1.9982270501564286e-05, "loss": 0.8355, "step": 119},
    {"epoch": 0.13, "grad_norm": 14.670349037930592, "learning_rate": 1.9979985821048925e-05, "loss": 0.623, "step": 120},
    {"epoch": 0.13, "grad_norm": 17.596509727191997, "learning_rate": 1.997756286534004e-05, "loss": 0.7664, "step": 121},
    {"epoch": 0.13, "grad_norm": 16.388736277383142, "learning_rate": 1.9975001668008282e-05, "loss": 0.6749, "step": 122},
    {"epoch": 0.13, "grad_norm": 11.99481967344947, "learning_rate": 1.9972302264539686e-05, "loss": 0.6724, "step": 123},
    {"epoch": 0.13, "grad_norm": 20.53435074276929, "learning_rate": 1.9969464692335153e-05, "loss": 0.7242, "step": 124},
    {"epoch": 0.13, "grad_norm": 11.475488488305645, "learning_rate": 1.996648899070996e-05, "loss": 0.7357, "step": 125},
    {"epoch": 0.13, "grad_norm": 25.553564042295182, "learning_rate": 1.996337520089319e-05, "loss": 0.7927, "step": 126},
    {"epoch": 0.14, "grad_norm": 8.109394353226076, "learning_rate": 1.9960123366027187e-05, "loss": 0.7367, "step": 127},
    {"epoch": 0.14, "grad_norm": 8.254315086779002, "learning_rate": 1.995673353116692e-05, "loss": 0.5336, "step": 128},
    {"epoch": 0.14, "grad_norm": 28.75262073669196, "learning_rate": 1.995320574327941e-05, "loss": 0.9592, "step": 129},
    {"epoch": 0.14, "grad_norm": 22.09118124557931, "learning_rate": 1.9949540051243032e-05, "loss": 0.6935, "step": 130},
    {"epoch": 0.14, "grad_norm": 19.95406582271437, "learning_rate": 1.9945736505846866e-05, "loss": 0.8254, "step": 131},
    {"epoch": 0.14, "grad_norm": 13.525596071741782, "learning_rate": 1.9941795159789983e-05, "loss": 0.6531, "step": 132},
    {"epoch": 0.14, "grad_norm": 9.891408326838208, "learning_rate": 1.9937716067680712e-05, "loss": 0.7162, "step": 133},
    {"epoch": 0.14, "grad_norm": 23.11321488500328, "learning_rate": 1.9933499286035896e-05, "loss": 0.7962, "step": 134},
    {"epoch": 0.14, "grad_norm": 14.482317722843773, "learning_rate": 1.9929144873280092e-05, "loss": 0.7631, "step": 135},
    {"epoch": 0.14, "grad_norm": 19.49882212964946, "learning_rate": 1.992465288974478e-05, "loss": 0.7617, "step": 136},
    {"epoch": 0.15, "grad_norm": 14.59122368824527, "learning_rate": 1.992002339766751e-05, "loss": 0.6722, "step": 137},
    {"epoch": 0.15, "grad_norm": 14.253058587354259, "learning_rate": 1.9915256461191054e-05, "loss": 0.6869, "step": 138},
    {"epoch": 0.15, "grad_norm": 22.646528058916722, "learning_rate": 1.99103521463625e-05, "loss": 0.858, "step": 139},
    {"epoch": 0.15, "grad_norm": 19.599804799046947, "learning_rate": 1.9905310521132353e-05, "loss": 0.8475, "step": 140},
    {"epoch": 0.15, "grad_norm": 7.459796818444622, "learning_rate": 1.9900131655353597e-05, "loss": 0.6509, "step": 141},
    {"epoch": 0.15, "grad_norm": 14.795548081947178, "learning_rate": 1.9894815620780705e-05, "loss": 0.781, "step": 142},
    {"epoch": 0.15, "grad_norm": 19.287512528853632, "learning_rate": 1.9889362491068658e-05, "loss": 0.7946, "step": 143},
    {"epoch": 0.15, "grad_norm": 11.780467440280454, "learning_rate": 1.9883772341771937e-05, "loss": 0.6937, "step": 144},
    {"epoch": 0.15, "grad_norm": 17.590934140385546, "learning_rate": 1.9878045250343445e-05, "loss": 0.8808, "step": 145},
    {"epoch": 0.16, "grad_norm": 16.151381586194, "learning_rate": 1.9872181296133473e-05, "loss": 0.8076, "step": 146},
    {"epoch": 0.16, "grad_norm": 10.326349558310788, "learning_rate": 1.986618056038856e-05, "loss": 0.8052, "step": 147},
    {"epoch": 0.16, "grad_norm": 9.391393835716272, "learning_rate": 1.9860043126250407e-05, "loss": 0.7323, "step": 148},
    {"epoch": 0.16, "grad_norm": 13.654055394779414, "learning_rate": 1.9853769078754685e-05, "loss": 0.7861, "step": 149},
    {"epoch": 0.16, "grad_norm": 13.20685189778904, "learning_rate": 1.9847358504829896e-05, "loss": 0.7136, "step": 150},
    {"epoch": 0.16, "grad_norm": 8.948428090341855, "learning_rate": 1.9840811493296134e-05, "loss": 0.6935, "step": 151},
    {"epoch": 0.16, "grad_norm": 9.441488025541567, "learning_rate": 1.9834128134863885e-05, "loss": 0.7222, "step": 152},
    {"epoch": 0.16, "grad_norm": 12.322015322175998, "learning_rate": 1.982730852213274e-05, "loss": 0.7606, "step": 153},
    {"epoch": 0.16, "grad_norm": 6.325468800857368, "learning_rate": 1.982035274959014e-05, "loss": 0.5735, "step": 154},
    {"epoch": 0.17, "grad_norm": 6.406666981105762, "learning_rate": 1.9813260913610048e-05, "loss": 0.5348, "step": 155},
    {"epoch": 0.17, "grad_norm": 17.612437547183127, "learning_rate": 1.9806033112451616e-05, "loss": 0.6477, "step": 156},
    {"epoch": 0.17, "grad_norm": 18.78091352132794, "learning_rate": 1.9798669446257844e-05, "loss": 0.7878, "step": 157},
    {"epoch": 0.17, "grad_norm": 15.546105512429289, "learning_rate": 1.979117001705415e-05, "loss": 0.852, "step": 158},
    {"epoch": 0.17, "grad_norm": 16.923786294030265, "learning_rate": 1.9783534928747006e-05, "loss": 0.7754, "step": 159},
    {"epoch": 0.17, "grad_norm": 13.076565312797653, "learning_rate": 1.9775764287122464e-05, "loss": 0.7425, "step": 160},
    {"epoch": 0.17, "grad_norm": 14.51221649438122, "learning_rate": 1.9767858199844697e-05, "loss": 0.693, "step": 161},
    {"epoch": 0.17, "grad_norm": 16.76943181344085, "learning_rate": 1.9759816776454527e-05, "loss": 0.8376, "step": 162},
    {"epoch": 0.17, "grad_norm": 10.230125019828748, "learning_rate": 1.9751640128367872e-05, "loss": 0.7238, "step": 163},
    {"epoch": 0.17, "grad_norm": 24.10748192789658, "learning_rate": 1.9743328368874237e-05, "loss": 0.8927, "step": 164},
    {"epoch": 0.18, "grad_norm": 24.836153220553506, "learning_rate": 1.973488161313512e-05, "loss": 0.8796, "step": 165},
    {"epoch": 0.18, "grad_norm": 15.039263981577998, "learning_rate": 1.972629997818243e-05, "loss": 0.7177, "step": 166},
    {"epoch": 0.18, "grad_norm": 7.166311769976143, "learning_rate": 1.9717583582916862e-05, "loss": 0.7274, "step": 167},
    {"epoch": 0.18, "grad_norm": 13.36802727443076, "learning_rate": 1.970873254810625e-05, "loss": 0.7425, "step": 168},
    {"epoch": 0.18, "grad_norm": 12.26776202136434, "learning_rate": 1.969974699638388e-05, "loss": 0.7671, "step": 169},
    {"epoch": 0.18, "grad_norm": 14.871789262668347, "learning_rate": 1.969062705224682e-05, "loss": 0.8251, "step": 170},
    {"epoch": 0.18, "grad_norm": 13.01131507667653, "learning_rate": 1.968137284205417e-05, "loss": 0.8131, "step": 171},
    {"epoch": 0.18, "grad_norm": 8.692440461337334, "learning_rate": 1.967198449402532e-05, "loss": 0.7195, "step": 172},
    {"epoch": 0.18, "grad_norm": 15.307857384021617, "learning_rate": 1.966246213823818e-05, "loss": 0.7753, "step": 173},
    {"epoch": 0.19, "grad_norm": 22.444147986646815, "learning_rate": 1.9652805906627356e-05, "loss": 0.9884, "step": 174},
    {"epoch": 0.19, "grad_norm": 14.639613594215758, "learning_rate": 1.9643015932982355e-05, "loss": 0.7357, "step": 175},
    {"epoch": 0.19, "grad_norm": 10.34217870581945, "learning_rate": 1.9633092352945698e-05, "loss": 0.7147, "step": 176},
    {"epoch": 0.19, "grad_norm": 12.490262046586464, "learning_rate": 1.9623035304011062e-05, "loss": 0.7384, "step": 177},
    {"epoch": 0.19, "grad_norm": 15.433175693682523, "learning_rate": 1.9612844925521375e-05, "loss": 0.7652, "step": 178},
    {"epoch": 0.19, "grad_norm": 6.045080843254174, "learning_rate": 1.960252135866687e-05, "loss": 0.8282, "step": 179},
    {"epoch": 0.19, "grad_norm": 8.861252768013037, "learning_rate": 1.9592064746483143e-05, "loss": 0.7317, "step": 180},
    {"epoch": 0.19, "grad_norm": 7.8221459133117435, "learning_rate": 1.9581475233849165e-05, "loss": 0.5407, "step": 181},
    {"epoch": 0.19, "grad_norm": 12.879262754926978, "learning_rate": 1.957075296748527e-05, "loss": 0.6644, "step": 182},
    {"epoch": 0.19, "grad_norm": 5.635984955428702, "learning_rate": 1.9559898095951137e-05, "loss": 0.6606, "step": 183},
    {"epoch": 0.2, "grad_norm": 7.056126169759745, "learning_rate": 1.9548910769643724e-05, "loss": 0.5217, "step": 184},
    {"epoch": 0.2, "grad_norm": 5.817340227592049, "learning_rate": 1.953779114079517e-05, "loss": 0.694, "step": 185},
    {"epoch": 0.2, "grad_norm": 11.240599667838348, "learning_rate": 1.9526539363470715e-05, "loss": 0.6613, "step": 186},
    {"epoch": 0.2, "grad_norm": 6.377305729246939, "learning_rate": 1.9515155593566536e-05, "loss": 0.6127, "step": 187},
    {"epoch": 0.2, "grad_norm": 12.70631731046457, "learning_rate": 1.9503639988807605e-05, "loss": 0.648, "step": 188},
    {"epoch": 0.2, "grad_norm": 7.740556879132694, "learning_rate": 1.9491992708745502e-05, "loss": 0.6363, "step": 189},
    {"epoch": 0.2, "grad_norm": 9.838244865368074, "learning_rate": 1.94802139147562e-05, "loss": 0.5207, "step": 190},
    {"epoch": 0.2, "grad_norm": 12.292539349351499, "learning_rate": 1.946830377003782e-05, "loss": 0.7129, "step": 191},
    {"epoch": 0.2, "grad_norm": 9.434980765550153, "learning_rate": 1.94562624396084e-05, "loss": 0.623, "step": 192},
    {"epoch": 0.21, "grad_norm": 17.774682137642376, "learning_rate": 1.9444090090303567e-05, "loss": 0.8399, "step": 193},
    {"epoch": 0.21, "grad_norm": 9.97338739760112, "learning_rate": 1.9431786890774265e-05, "loss": 0.5893, "step": 194},
    {"epoch": 0.21, "grad_norm": 11.708748373782319, "learning_rate": 1.941935301148439e-05, "loss": 0.7569, "step": 195},
    {"epoch": 0.21, "grad_norm": 10.640144063322005, "learning_rate": 1.9406788624708438e-05, "loss": 0.694, "step": 196},
    {"epoch": 0.21, "grad_norm": 14.454447375580173, "learning_rate": 1.939409390452913e-05, "loss": 0.7807, "step": 197},
    {"epoch": 0.21, "grad_norm": 10.35715254572469, "learning_rate": 1.9381269026834975e-05, "loss": 0.6799, "step": 198},
    {"epoch": 0.21, "grad_norm": 6.919008432462478, "learning_rate": 1.9368314169317858e-05, "loss": 0.6883, "step": 199},
    {"epoch": 0.21, "grad_norm": 9.137624019639443, "learning_rate": 1.935522951147056e-05, "loss": 0.5919, "step": 200},
    {"epoch": 0.21, "grad_norm": 15.35731927734825, "learning_rate": 1.9342015234584277e-05, "loss": 0.5982, "step": 201},
    {"epoch": 0.22, "grad_norm": 28.1752923954513, "learning_rate": 1.932867152174612e-05, "loss": 0.6482, "step": 202},
    {"epoch": 0.22, "grad_norm": 28.1752923954513, "learning_rate": 1.932867152174612e-05, "loss": 1.748, "step": 203},
    {"epoch": 0.22, "grad_norm": 1587.2702484305357, "learning_rate": 1.9315198557836555e-05, "loss": 1.6079, "step": 204},
    {"epoch": 0.22, "grad_norm": 1587.2702484305357, "learning_rate": 1.9315198557836555e-05, "loss": 11.678, "step": 205},
    {"epoch": 0.22, "grad_norm": 2434.8463463776825, "learning_rate": 1.9301596529526856e-05, "loss": 11.5182, "step": 206},
    {"epoch": 0.22, "grad_norm": 2901.204727033937, "learning_rate": 1.928786562527652e-05, "loss": 9.7123, "step": 207},
    {"epoch": 0.22, "grad_norm": 490.65682411342476, "learning_rate": 1.9274006035330654e-05, "loss": 3.1282, "step": 208},
    {"epoch": 0.22, "grad_norm": 480.76138720661936, "learning_rate": 1.9260017951717334e-05, "loss": 2.4949, "step": 209},
    {"epoch": 0.22, "grad_norm": 143.8799125203908, "learning_rate": 1.9245901568244945e-05, "loss": 2.1616, "step": 210},
    {"epoch": 0.22, "grad_norm": 226.80553179811704, "learning_rate": 1.9231657080499507e-05, "loss": 1.9539, "step": 211},
    {"epoch": 0.23, "grad_norm": 210.9739319214792, "learning_rate": 1.921728468584195e-05, "loss": 1.7932, "step": 212},
    {"epoch": 0.23, "grad_norm": 102.69163082309667, "learning_rate": 1.9202784583405386e-05, "loss": 1.5288, "step": 213},
    {"epoch": 0.23, "grad_norm": 80.79475053684271, "learning_rate": 1.9188156974092355e-05, "loss": 1.4608, "step": 214},
    {"epoch": 0.23, "grad_norm": 131.37203972527402, "learning_rate": 1.9173402060572028e-05, "loss": 1.3191, "step": 215},
    {"epoch": 0.23, "grad_norm": 641.6313774584162, "learning_rate": 1.915852004727742e-05, "loss": 1.8877, "step": 216},
    {"epoch": 0.23, "grad_norm": 535.4790750541629, "learning_rate": 1.9143511140402532e-05, "loss": 1.4452, "step": 217},
    {"epoch": 0.23, "grad_norm": 548.3152023208836, "learning_rate": 1.9128375547899518e-05, "loss": 1.9607, "step": 218},
    {"epoch": 0.23, "grad_norm": 568.5307973204539, "learning_rate": 1.9113113479475784e-05, "loss": 1.8329, "step": 219},
    {"epoch": 0.23, "grad_norm": 247.07029020503853, "learning_rate": 1.9097725146591103e-05, "loss": 1.2675, "step": 220},
    {"epoch": 0.24, "grad_norm": 483.1919344273096, "learning_rate": 1.908221076245466e-05, "loss": 3.1382, "step": 221},
    {"epoch": 0.24, "grad_norm": 435.67666262712555, "learning_rate": 1.9066570542022122e-05, "loss": 2.9579, "step": 222},
    {"epoch": 0.24, "grad_norm": 106.36477945538998, "learning_rate": 1.905080470199264e-05, "loss": 1.9584, "step": 223},
    {"epoch": 0.24, "grad_norm": 150.21543919990003, "learning_rate": 1.9034913460805868e-05, "loss": 1.5164, "step": 224},
    {"epoch": 0.24, "grad_norm": 265.0211172938759, "learning_rate": 1.901889703863891e-05, "loss": 1.4234, "step": 225},
    {"epoch": 0.24, "grad_norm": 135.22687197035, "learning_rate": 1.90027556574033e-05, "loss": 1.3242, "step": 226},
    {"epoch": 0.24, "grad_norm": 69.09048458856181, "learning_rate": 1.8986489540741895e-05, "loss": 1.1573, "step": 227},
    {"epoch": 0.24, "grad_norm": 317.48370008905204, "learning_rate": 1.89700989140258e-05, "loss": 1.6469, "step": 228},
    {"epoch": 0.24, "grad_norm": 178.1220954711751, "learning_rate": 1.8953584004351243e-05, "loss": 1.3997, "step": 229},
    {"epoch": 0.25, "grad_norm": 58.594800768267845, "learning_rate": 1.8936945040536413e-05, "loss": 1.4203, "step": 230},
    {"epoch": 0.25, "grad_norm": 100.8540607851877, "learning_rate": 1.892018225311831e-05, "loss": 1.3959, "step": 231},
    {"epoch": 0.25, "grad_norm": 40.196456568889715, "learning_rate": 1.8903295874349528e-05, "loss": 1.3036, "step": 232},
    {"epoch": 0.25, "grad_norm": 49.160977991667785, "learning_rate": 1.8886286138195063e-05, "loss": 1.2698, "step": 233},
    {"epoch": 0.25, "grad_norm": 38.84069117366192, "learning_rate": 1.8869153280329054e-05, "loss": 1.1846, "step": 234},
    {"epoch": 0.25, "grad_norm": 43.81901383324617, "learning_rate": 1.885189753813152e-05, "loss": 1.2088, "step": 235},
    {"epoch": 0.25, "grad_norm": 19.950274945570303, "learning_rate": 1.8834519150685074e-05, "loss": 1.0781, "step": 236},
    {"epoch": 0.25, "grad_norm": 50.927777642645886, "learning_rate": 1.8817018358771612e-05, "loss": 1.207, "step": 237},
    {"epoch": 0.25, "grad_norm": 45.137241355795545, "learning_rate": 1.879939540486897e-05, "loss": 1.1345, "step": 238},
    {"epoch": 0.25, "grad_norm": 49.611501553831054, "learning_rate": 1.8781650533147572e-05, "loss": 1.1675, "step": 239},
    {"epoch": 0.26, "grad_norm": 60.82753635007021, "learning_rate": 1.876378398946704e-05, "loss": 1.1098, "step": 240},
    {"epoch": 0.26, "grad_norm": 36.20738946945509, "learning_rate": 1.87457960213728e-05, "loss": 1.0201, "step": 241},
    {"epoch": 0.26, "grad_norm": 46.88562809493131, "learning_rate": 1.8727686878092638e-05, "loss": 1.0511, "step": 242},
    {"epoch": 0.26, "grad_norm": 38.81307588678313, "learning_rate": 1.8709456810533248e-05, "loss": 0.9694, "step": 243},
    {"epoch": 0.26, "grad_norm": 29.29930215775943, "learning_rate": 1.8691106071276763e-05, "loss": 0.9828, "step": 244},
    {"epoch": 0.26, "grad_norm": 54.42807589658664, "learning_rate": 1.867263491457726e-05, "loss": 1.1109, "step": 245},
    {"epoch": 0.26, "grad_norm": 29.031387361113243, "learning_rate": 1.8654043596357215e-05, "loss": 1.0194, "step": 246},
    {"epoch": 0.26, "grad_norm": 35.21872276907598, "learning_rate": 1.8635332374203993e-05, "loss": 1.0552, "step": 247},
    {"epoch": 0.26, "grad_norm": 22.43165286258535, "learning_rate": 1.8616501507366233e-05, "loss": 0.9364, "step": 248},
    {"epoch": 0.27, "grad_norm": 29.83402427558291, "learning_rate": 1.85975512567503e-05, "loss": 1.0535, "step": 249},
    {"epoch": 0.27, "grad_norm": 36.54199147143507, "learning_rate": 1.8578481884916645e-05, "loss": 0.9772, "step": 250},
    {"epoch": 0.27, "grad_norm": 16.06774723588221, "learning_rate": 1.8559293656076167e-05, "loss": 0.9044, "step": 251},
    {"epoch": 0.27, "grad_norm": 14.242434397235082, "learning_rate": 1.8539986836086568e-05, "loss": 0.8656, "step": 252},
    {"epoch": 0.27, "grad_norm": 34.06087523793793, "learning_rate": 1.8520561692448655e-05, "loss": 0.9776, "step": 253},
    {"epoch": 0.27, "grad_norm": 23.65421594444165, "learning_rate": 1.850101849430264e-05, "loss": 0.9739, "step": 254},
    {"epoch": 0.27, "grad_norm": 53.1622333489428, "learning_rate": 1.848135751242441e-05, "loss": 0.9659, "step": 255},
    {"epoch": 0.27, "grad_norm": 27.743193122343396, "learning_rate": 1.8461579019221775e-05, "loss": 0.9828, "step": 256},
    {"epoch": 0.27, "grad_norm": 36.32985964499645, "learning_rate": 1.8441683288730686e-05, "loss": 1.0724, "step": 257},
    {"epoch": 0.27, "grad_norm": 57.208457798230874, "learning_rate": 1.8421670596611463e-05, "loss": 1.1399, "step": 258},
    {"epoch": 0.28, "grad_norm": 36.265365003042525, "learning_rate": 1.840154122014494e-05, "loss": 1.061, "step": 259},
    {"epoch": 0.28, "grad_norm": 50.18323851957246, "learning_rate": 1.8381295438228656e-05, "loss": 0.9041, "step": 260},
    {"epoch": 0.28, "grad_norm": 23.117360915951355, "learning_rate": 1.836093353137297e-05, "loss": 0.974, "step": 261},
    {"epoch": 0.28, "grad_norm": 50.588104484420946, "learning_rate": 1.8340455781697175e-05, "loss": 0.9803, "step": 262},
    {"epoch": 0.28, "grad_norm": 43.25313730795733, "learning_rate": 1.831986247292561e-05, "loss": 0.8178, "step": 263},
    {"epoch": 0.28, "grad_norm": 25.22535296579869, "learning_rate": 1.8299153890383705e-05, "loss": 0.9212, "step": 264},
    {"epoch": 0.28, "grad_norm": 24.74441945494526, "learning_rate": 1.8278330320994035e-05, "loss": 0.9241, "step": 265},
    {"epoch": 0.28, "grad_norm": 24.98014366767532, "learning_rate": 1.8257392053272345e-05, "loss": 0.9996, "step": 266},
    {"epoch": 0.28, "grad_norm": 30.455334182511248, "learning_rate": 1.823633937732357e-05, "loss": 0.8491, "step": 267},
    {"epoch": 0.29, "grad_norm": 35.02803006376697, "learning_rate": 1.8215172584837776e-05, "loss": 0.9924, "step": 268},
    {"epoch": 0.29, "grad_norm": 21.742297963510474, "learning_rate": 1.8193891969086164e-05, "loss": 0.9068, "step": 269},
    {"epoch": 0.29, "grad_norm": 16.27634444074666, "learning_rate": 1.817249782491697e-05, "loss": 0.9532, "step": 270},
    {"epoch": 0.29, "grad_norm": 27.206570441621835, "learning_rate": 1.8150990448751393e-05, "loss": 0.9585, "step": 271},
    {"epoch": 0.29, "grad_norm": 9.683718717075713, "learning_rate": 1.8129370138579507e-05, "loss": 0.8514, "step": 272},
    {"epoch": 0.29, "grad_norm": 46.207488174236765, "learning_rate": 1.8107637193956102e-05, "loss": 0.9744, "step": 273},
    {"epoch": 0.29, "grad_norm": 24.195105770115998, "learning_rate": 1.8085791915996538e-05, "loss": 0.994, "step": 274},
    {"epoch": 0.29, "grad_norm": 33.17832980287206, "learning_rate": 1.8063834607372603e-05, "loss": 1.1351, "step": 275},
    {"epoch": 0.29, "grad_norm": 18.66629676451458, "learning_rate": 1.804176557230828e-05, "loss": 0.7503, "step": 276},
    {"epoch": 0.3, "grad_norm": 35.58783412196323, "learning_rate": 1.8019585116575554e-05, "loss": 0.8252, "step": 277},
    {"epoch": 0.3, "grad_norm": 98.83505787049795, "learning_rate": 1.799729354749018e-05, "loss": 0.772, "step": 278},
    {"epoch": 0.3, "grad_norm": 21.601957059226525, "learning_rate": 1.7974891173907406e-05, "loss": 0.7751, "step": 279},
    {"epoch": 0.3, "grad_norm": 25.682884674147235, "learning_rate": 1.7952378306217705e-05, "loss": 0.8812, "step": 280},
    {"epoch": 0.3, "grad_norm": 14.543550272731224, "learning_rate": 1.792975525634248e-05, "loss": 0.8405, "step": 281},
    {"epoch": 0.3, "grad_norm": 49.50597218889562, "learning_rate": 1.790702233772973e-05, "loss": 0.7983, "step": 282},
    {"epoch": 0.3, "grad_norm": 28.92796112130337, "learning_rate": 1.7884179865349713e-05, "loss": 0.8288, "step": 283},
    {"epoch": 0.3, "grad_norm": 31.306112233207404, "learning_rate": 1.786122815569058e-05, "loss": 1.0268, "step": 284},
    {"epoch": 0.3, "grad_norm": 23.72305170543349, "learning_rate": 1.7838167526754002e-05, "loss": 0.9805, "step": 285},
    {"epoch": 0.3, "grad_norm": 30.976885389109945, "learning_rate": 1.7814998298050744e-05, "loss": 0.9433, "step": 286},
    {"epoch": 0.31, "grad_norm": 9.017186183397232, "learning_rate": 1.7791720790596242e-05, "loss": 0.9042, "step": 287},
    {"epoch": 0.31, "grad_norm": 24.453663623228078, "learning_rate": 1.7768335326906182e-05, "loss": 0.916, "step": 288},
    {"epoch": 0.31, "grad_norm": 16.661532775610848, "learning_rate": 1.774484223099199e-05, "loss": 0.8476, "step": 289},
    {"epoch": 0.31, "grad_norm": 75.49348483174468, "learning_rate": 1.7721241828356377e-05, "loss": 0.9214, "step": 290},
    {"epoch": 0.31, "grad_norm": 28.038342380284316, "learning_rate": 1.7697534445988804e-05, "loss": 0.5674, "step": 291},
    {"epoch": 0.31, "grad_norm": 26.918490245230988, "learning_rate": 1.7673720412360973e-05, "loss": 0.7465, "step": 292},
    {"epoch": 0.31, "grad_norm": 64.73808049621005, "learning_rate": 1.7649800057422256e-05, "loss": 0.7832, "step": 293},
    {"epoch": 0.31, "grad_norm": 21.87842844322322, "learning_rate": 1.7625773712595147e-05, "loss": 0.5816, "step": 294},
    {"epoch": 0.31, "grad_norm": 15.934437709078225, "learning_rate": 1.760164171077064e-05, "loss": 0.7923, "step": 295},
    {"epoch": 0.32, "grad_norm": 50.69489297444858, "learning_rate": 1.7577404386303646e-05, "loss": 0.6258, "step": 296},
    {"epoch": 0.32, "grad_norm": 21.105815660081543, "learning_rate": 1.755306207500834e-05, "loss": 0.7464, "step": 297},
    {"epoch": 0.32, "grad_norm": 21.183565369421174, "learning_rate": 1.7528615114153524e-05, "loss": 0.6985, "step": 298},
    {"epoch": 0.32, "grad_norm": 15.244682840971725, "learning_rate": 1.750406384245793e-05, "loss": 0.767, "step": 299},
    {"epoch": 0.32, "grad_norm": 10.358767383759252, "learning_rate": 1.747940860008556e-05, "loss": 0.6928, "step": 300},
    {"epoch": 0.32, "grad_norm": 39.96772160224854, "learning_rate": 1.7454649728640944e-05, "loss": 0.6887, "step": 301},
    {"epoch": 0.32, "grad_norm": 16.221102491098176, "learning_rate": 1.742978757116443e-05, "loss": 0.6359, "step": 302},
    {"epoch": 0.32, "grad_norm": 36.830429589303506, "learning_rate": 1.7404822472127406e-05, "loss": 0.6717, "step": 303},
    {"epoch": 0.32, "grad_norm": 17.661289846973503, "learning_rate": 1.7379754777427554e-05, "loss": 0.6609, "step": 304},
    {"epoch": 0.32, "grad_norm": 8.91929875914757, "learning_rate": 1.7354584834384036e-05, "loss": 0.6851, "step": 305},
    {"epoch": 0.33, "grad_norm": 36.39317884774958, "learning_rate": 1.7329312991732687e-05, "loss": 0.8089, "step": 306},
    {"epoch": 0.33, "grad_norm": 20.66408093673374, "learning_rate": 1.73039395996212e-05, "loss": 0.6783, "step": 307},
    {"epoch": 0.33, "grad_norm": 13.03603460389346, "learning_rate": 1.727846500960425e-05, "loss": 0.7026, "step": 308},
    {"epoch": 0.33, "grad_norm": 17.971533560556523, "learning_rate": 1.725288957463864e-05, "loss": 0.7989, "step": 309},
    {"epoch": 0.33, "grad_norm": 9.851305189485732, "learning_rate": 1.7227213649078395e-05, "loss": 0.6135, "step": 310},
    {"epoch": 0.33, "grad_norm": 13.9093394062407, "learning_rate": 1.720143758866988e-05, "loss": 0.7099, "step": 311},
    {"epoch": 0.33, "grad_norm": 15.918012101752101, "learning_rate": 1.7175561750546833e-05, "loss": 0.6836, "step": 312},
    {"epoch": 0.33, "grad_norm": 9.424349721892472, "learning_rate": 1.7149586493225453e-05, "loss": 0.6301, "step": 313},
    {"epoch": 0.33, "grad_norm": 15.040725484530588, "learning_rate": 1.7123512176599413e-05, "loss": 0.5931, "step": 314},
    {"epoch": 0.34, "grad_norm": 10.404277489273346, "learning_rate": 1.709733916193487e-05, "loss": 0.5216, "step": 315},
    {"epoch": 0.34, "grad_norm": 10.351015057753036, "learning_rate": 1.7071067811865477e-05, "loss": 0.6885, "step": 316},
    {"epoch": 0.34, "grad_norm": 12.500602158481879, "learning_rate": 1.704469849038734e-05, "loss": 0.66, "step": 317},
    {"epoch": 0.34, "grad_norm": 9.587643268920083, "learning_rate": 1.7018231562853987e-05, "loss": 0.6073, "step": 318},
    {"epoch": 0.34, "grad_norm": 13.14738192750954, "learning_rate": 1.6991667395971306e-05, "loss": 0.6194, "step": 319},
    {"epoch": 0.34, "grad_norm": 11.464802706452456, "learning_rate": 1.6965006357792454e-05, "loss": 0.4891, "step": 320},
    {"epoch": 0.34, "grad_norm": 6.153912186441177, "learning_rate": 1.6938248817712767e-05, "loss": 0.5314, "step": 321},
    {"epoch": 0.34, "grad_norm": 13.329406945514984, "learning_rate": 1.691139514646464e-05, "loss": 0.7992, "step": 322},
    {"epoch": 0.34, "grad_norm": 13.044412872566445, "learning_rate": 1.6884445716112388e-05, "loss": 0.7126, "step": 323},
    {"epoch": 0.35, "grad_norm": 8.05329073835129, "learning_rate": 1.685740090004709e-05, "loss": 0.5594, "step": 324},
    {"epoch": 0.35, "grad_norm": 12.513875641703635, "learning_rate": 1.6830261072981423e-05, "loss": 0.6085, "step": 325},
    {"epoch": 0.35, "grad_norm": 12.059036404622644, "learning_rate": 1.680302661094446e-05, "loss": 0.5629, "step": 326},
    {"epoch": 0.35, "grad_norm": 10.324831974232412, "learning_rate": 1.677569789127647e-05, "loss": 0.6268, "step": 327},
    {"epoch": 0.35, "grad_norm": 9.702271050156932, "learning_rate": 1.6748275292623678e-05, "loss": 0.621, "step": 328},
    {"epoch": 0.35, "grad_norm": 11.784449310834715, "learning_rate": 1.6720759194933037e-05, "loss": 0.6856, "step": 329},
    {"epoch": 0.35, "grad_norm": 8.598925774309697, "learning_rate": 1.669314997944694e-05, "loss": 0.6918, "step": 330},
    {"epoch": 0.35, "grad_norm": 11.857440160524549, "learning_rate": 1.666544802869796e-05, "loss": 0.6879, "step": 331},
    {"epoch": 0.35, "grad_norm": 8.032666301516855, "learning_rate": 1.6637653726503533e-05, "loss": 0.5792, "step": 332},
    {"epoch": 0.35, "grad_norm": 7.0937478718250295, "learning_rate": 1.660976745796065e-05, "loss": 0.6607, "step": 333},
    {"epoch": 0.36, "grad_norm": 9.465209749684965, "learning_rate": 1.658178960944052e-05, "loss": 0.6535, "step": 334},
    {"epoch": 0.36, "grad_norm": 15.131095507571468, "learning_rate": 1.655372056858322e-05, "loss": 0.6438, "step": 335},
    {"epoch": 0.36, "grad_norm": 17.304531600026, "learning_rate": 1.6525560724292303e-05, "loss": 0.7653, "step": 336},
    {"epoch": 0.36, "grad_norm": 8.695700770875131, "learning_rate": 1.6497310466729448e-05, "loss": 0.6045, "step": 337},
    {"epoch": 0.36, "grad_norm": 11.95231240235872, "learning_rate": 1.646897018730902e-05, "loss": 0.6791, "step": 338},
    {"epoch": 0.36, "grad_norm": 9.486589536350005, "learning_rate": 1.6440540278692656e-05, "loss": 0.5519, "step": 339},
    {"epoch": 0.36, "grad_norm": 10.961929493661737, "learning_rate": 1.6412021134783835e-05, "loss": 0.7537, "step": 340},
    {"epoch": 0.36, "grad_norm": 9.852616880372151, "learning_rate": 1.6383413150722413e-05, "loss": 0.5775, "step": 341},
    {"epoch": 0.36, "grad_norm": 4.770203374130839, "learning_rate": 1.6354716722879152e-05, "loss": 0.5684, "step": 342},
    {"epoch": 0.37, "grad_norm": 6.203239240182946, "learning_rate": 1.6325932248850206e-05, "loss": 0.5391, "step": 343},
    {"epoch": 0.37, "grad_norm": 14.714310109670022, "learning_rate": 1.6297060127451656e-05, "loss": 0.6426, "step": 344},
    {"epoch": 0.37, "grad_norm": 16.664110650870665, "learning_rate": 1.626810075871394e-05, "loss": 0.7493, "step": 345},
    {"epoch": 0.37, "grad_norm": 10.82617102662038, "learning_rate": 1.6239054543876345e-05, "loss": 0.6214, "step": 346},
    {"epoch": 0.37, "grad_norm": 13.189719768118387, "learning_rate": 1.6209921885381418e-05, "loss": 0.647, "step": 347},
    {"epoch": 0.37, "grad_norm": 6.638748272318463, "learning_rate": 1.6180703186869414e-05, "loss": 0.6062, "step": 348},
    {"epoch": 0.37, "grad_norm": 9.318868865822735, "learning_rate": 1.615139885317269e-05, "loss": 0.5877, "step": 349},
    {"epoch": 0.37, "grad_norm": 13.07963389547572, "learning_rate": 1.6122009290310097e-05, "loss": 0.697, "step": 350},
    {"epoch": 0.37, "grad_norm": 8.538286649824611, "learning_rate": 1.6092534905481367e-05, "loss": 0.605, "step": 351},
    {"epoch": 0.38, "grad_norm": 14.845192579391924, "learning_rate": 1.6062976107061454e-05, "loss": 0.5283, "step": 352},
    {"epoch": 0.38, "grad_norm": 11.398188334173918, "learning_rate": 1.6033333304594886e-05, "loss": 0.7799, "step": 353},
    {"epoch": 0.38, "grad_norm": 11.098935636927996, "learning_rate": 1.6003606908790082e-05, "loss": 0.6517, "step": 354},
    {"epoch": 0.38, "grad_norm": 9.295708766284237, "learning_rate": 1.5973797331513674e-05, "loss": 0.8253, "step": 355},
    {"epoch": 0.38, "grad_norm": 15.251818246107037, "learning_rate": 1.5943904985784797e-05, "loss": 0.729, "step": 356},
    {"epoch": 0.38, "grad_norm": 5.329126700593149, "learning_rate": 1.5913930285769356e-05, "loss": 0.5526, "step": 357},
    {"epoch": 0.38, "grad_norm": 12.120816580430638, "learning_rate": 1.5883873646774296e-05, "loss": 0.699, "step": 358},
    {"epoch": 0.38, "grad_norm": 10.153946398501363, "learning_rate": 1.5853735485241858e-05, "loss": 0.6955, "step": 359},
    {"epoch": 0.38, "grad_norm": 6.094377398479003, "learning_rate": 1.582351621874378e-05, "loss": 0.6237, "step": 360},
    {"epoch": 0.38, "grad_norm": 12.749739058129057, "learning_rate": 1.579321626597554e-05, "loss": 0.7169, "step": 361},
    {"epoch": 0.39, "grad_norm": 8.376136137589857, "learning_rate": 1.5762836046750546e-05, "loss": 0.6543, "step": 362},
    {"epoch": 0.39, "grad_norm": 6.832733254275183, "learning_rate": 1.573237598199432e-05, "loss": 0.6495, "step": 363},
    {"epoch": 0.39, "grad_norm": 10.567138540731836, "learning_rate": 1.570183649373865e-05, "loss": 0.6424, "step": 364},
    {"epoch": 0.39, "grad_norm": 4.242489418395466, "learning_rate": 1.5671218005115767e-05, "loss": 0.5227, "step": 365},
    {"epoch": 0.39, "grad_norm": 10.186721290405504, "learning_rate": 1.5640520940352476e-05, "loss": 0.6831, "step": 366},
    {"epoch": 0.39, "grad_norm": 5.2392014967068405, "learning_rate": 1.5609745724764264e-05, "loss": 0.5989, "step": 367},
    {"epoch": 0.39, "grad_norm": 7.53125873380665, "learning_rate": 1.5578892784749414e-05, "loss": 0.5461, "step": 368},
    {"epoch": 0.39, "grad_norm": 11.308114294485758, "learning_rate": 1.5547962547783126e-05, "loss": 0.5895, "step": 369},
    {"epoch": 0.39, "grad_norm": 9.200894044889376, "learning_rate": 1.5516955442411533e-05, "loss": 0.7269, "step": 370},
    {"epoch": 0.4, "grad_norm": 7.3282431817133, "learning_rate": 1.5485871898245824e-05, "loss": 0.5479, "step": 371},
    {"epoch": 0.4, "grad_norm": 14.868640801063636, "learning_rate": 1.5454712345956254e-05, "loss": 0.6124, "step": 372},
    {"epoch": 0.4, "grad_norm": 18.938959881407193, "learning_rate": 1.54234772172662e-05, "loss": 0.6996, "step": 373},
    {"epoch": 0.4, "grad_norm": 9.41122569113249, "learning_rate": 1.539216694494616e-05, "loss": 0.7736, "step": 374},
    {"epoch": 0.4, "grad_norm": 13.076645506243146, "learning_rate": 1.536078196280777e-05, "loss": 0.6184, "step": 375},
    {"epoch": 0.4, "grad_norm": 14.603964846269042, "learning_rate": 1.532932270569778e-05, "loss": 0.7387, "step": 376},
    {"epoch": 0.4, "grad_norm": 7.409964871939044, "learning_rate": 1.5297789609492062e-05, "loss": 0.6705, "step": 377},
    {"epoch": 0.4, "grad_norm": 10.532417778256553, "learning_rate": 1.526618311108952e-05, "loss": 0.6359, "step": 378},
    {"epoch": 0.4, "grad_norm": 7.918617892191258, "learning_rate": 1.5234503648406075e-05, "loss": 0.5597, "step": 379},
    {"epoch": 0.4, "grad_norm": 8.984191116234184, "learning_rate": 1.5202751660368594e-05, "loss": 0.5886, "step": 380},
    {"epoch": 0.41, "grad_norm": 8.875342349884049, "learning_rate": 1.5170927586908787e-05, "loss": 0.579, "step": 381},
    {"epoch": 0.41, "grad_norm": 12.368851666890444, "learning_rate": 1.513903186895713e-05, "loss": 0.684, "step": 382},
    {"epoch": 0.41, "grad_norm": 8.674893219974978, "learning_rate": 1.5107064948436758e-05, "loss": 0.5412, "step": 383},
    {"epoch": 0.41, "grad_norm": 11.644842239089794, "learning_rate": 1.5075027268257328e-05, "loss": 0.6165, "step": 384},
    {"epoch": 0.41, "grad_norm": 24.313226821351616, "learning_rate": 1.5042919272308895e-05, "loss": 1.0199, "step": 385},
    {"epoch": 0.41, "grad_norm": 12.280306305433697, "learning_rate": 1.5010741405455751e-05, "loss": 0.5976, "step": 386},
    {"epoch": 0.41, "grad_norm": 5.830472782170163, "learning_rate": 1.4978494113530268e-05, "loss": 0.5179, "step": 387},
    {"epoch": 0.41, "grad_norm": 14.70235378916328, "learning_rate": 1.4946177843326726e-05, "loss": 0.7092, "step": 388},
    {"epoch": 0.41, "grad_norm": 17.543216455121254, "learning_rate": 1.4913793042595109e-05, "loss": 0.8279, "step": 389},
    {"epoch": 0.42, "grad_norm": 12.859056834147074, "learning_rate": 1.4881340160034909e-05, "loss": 0.7344, "step": 390},
    {"epoch": 0.42, "grad_norm": 9.382814693311708, "learning_rate": 1.4848819645288915e-05, "loss": 0.5823, "step": 391},
    {"epoch": 0.42, "grad_norm": 10.602701451692775, "learning_rate": 1.4816231948936967e-05, "loss": 0.706, "step": 392},
    {"epoch": 0.42, "grad_norm": 9.699931667366167, "learning_rate": 1.4783577522489733e-05, "loss": 0.6573, "step": 393},
    {"epoch": 0.42, "grad_norm": 8.280832106689648, "learning_rate": 1.475085681838244e-05, "loss": 0.6462, "step": 394},
    {"epoch": 0.42, "grad_norm": 10.316666897706078, "learning_rate": 1.4718070289968602e-05, "loss": 0.6581, "step": 395},
    {"epoch": 0.42, "grad_norm": 7.552089443149507, "learning_rate": 1.4685218391513752e-05, "loss": 0.6516, "step": 396},
    {"epoch": 0.42, "grad_norm": 10.041760330765845, "learning_rate": 1.4652301578189141e-05, "loss": 0.7184, "step": 397},
    {"epoch": 0.42, "grad_norm": 11.3264716915839, "learning_rate": 1.4619320306065432e-05, "loss": 0.6023, "step": 398},
    {"epoch": 0.43, "grad_norm": 9.31395109143123, "learning_rate": 1.4586275032106373e-05, "loss": 0.6248, "step": 399},
    {"epoch": 0.43, "grad_norm": 16.084796485739954, "learning_rate": 1.4553166214162486e-05, "loss": 0.5707, "step": 400},
    {"epoch": 0.43, "grad_norm": 5.559676215156368, "learning_rate": 1.4519994310964697e-05, "loss": 0.545, "step": 401},
    {"epoch": 0.43, "grad_norm": 7.5228362636067425, "learning_rate": 1.4486759782118012e-05, "loss": 0.634, "step": 402},
    {"epoch": 0.43, "grad_norm": 10.426847253583567, "learning_rate": 1.4453463088095108e-05, "loss": 0.613, "step": 403},
    {"epoch": 0.43, "grad_norm": 6.094049952534055, "learning_rate": 1.4420104690229992e-05, "loss": 0.5793, "step": 404},
    {"epoch": 0.43, "grad_norm": 6.245104416676842, "learning_rate": 1.4386685050711593e-05, "loss": 0.5631, "step": 405},
    {"epoch": 0.43, "grad_norm": 7.453663730544318, "learning_rate": 1.4353204632577354e-05, "loss": 0.5591, "step": 406},
    {"epoch": 0.43, "grad_norm": 10.466741619757949, "learning_rate": 1.4319663899706818e-05, "loss": 0.7189,
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"grad_norm": 6.220499980519979, |
|
"learning_rate": 1.4286063316815209e-05, |
|
"loss": 0.4383, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 10.892786776066355, |
|
"learning_rate": 1.4252403349446986e-05, |
|
"loss": 0.5664, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 6.174114043802528, |
|
"learning_rate": 1.4218684463969396e-05, |
|
"loss": 0.5716, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 4.9332645630935605, |
|
"learning_rate": 1.4184907127566006e-05, |
|
"loss": 0.553, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 7.331476371683697, |
|
"learning_rate": 1.4151071808230246e-05, |
|
"loss": 0.5706, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 9.54440691864147, |
|
"learning_rate": 1.4117178974758903e-05, |
|
"loss": 0.6349, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 9.51093808442514, |
|
"learning_rate": 1.4083229096745644e-05, |
|
"loss": 0.6076, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 7.416880704206411, |
|
"learning_rate": 1.404922264457449e-05, |
|
"loss": 0.5238, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 9.30112890680624, |
|
"learning_rate": 1.4015160089413331e-05, |
|
"loss": 0.6277, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"grad_norm": 9.964620600235138, |
|
"learning_rate": 1.3981041903207364e-05, |
|
"loss": 0.6501, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 8.049380804954936, |
|
"learning_rate": 1.3946868558672569e-05, |
|
"loss": 0.5853, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 9.941935672040696, |
|
"learning_rate": 1.3912640529289163e-05, |
|
"loss": 0.6632, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.961145697377557, |
|
"learning_rate": 1.3878358289295032e-05, |
|
"loss": 0.5542, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 4.788363887274814, |
|
"learning_rate": 1.3844022313679167e-05, |
|
"loss": 0.5203, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 12.11801121511047, |
|
"learning_rate": 1.380963307817507e-05, |
|
"loss": 0.6661, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 11.34675588679876, |
|
"learning_rate": 1.3775191059254185e-05, |
|
"loss": 0.6116, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 6.678425818352266, |
|
"learning_rate": 1.3740696734119279e-05, |
|
"loss": 0.6401, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 12.160178505852342, |
|
"learning_rate": 1.3706150580697826e-05, |
|
"loss": 0.6138, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 6.094569373598361, |
|
"learning_rate": 1.3671553077635404e-05, |
|
"loss": 0.5602, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"grad_norm": 5.908165508182039, |
|
"learning_rate": 1.3636904704289053e-05, |
|
"loss": 0.5606, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 12.500164793835593, |
|
"learning_rate": 1.3602205940720628e-05, |
|
"loss": 0.5601, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 5.981711152222135, |
|
"learning_rate": 1.3567457267690152e-05, |
|
"loss": 0.5011, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 6.390064988076557, |
|
"learning_rate": 1.3532659166649165e-05, |
|
"loss": 0.515, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 11.203440074699836, |
|
"learning_rate": 1.3497812119734037e-05, |
|
"loss": 0.7016, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 13.733785334526278, |
|
"learning_rate": 1.3462916609759298e-05, |
|
"loss": 0.6623, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 15.73461768409232, |
|
"learning_rate": 1.342797312021094e-05, |
|
"loss": 0.7519, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 8.804555804165531, |
|
"learning_rate": 1.3392982135239736e-05, |
|
"loss": 0.531, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 10.63905280022859, |
|
"learning_rate": 1.3357944139654508e-05, |
|
"loss": 0.5914, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"grad_norm": 14.738317600036572, |
|
"learning_rate": 1.3322859618915431e-05, |
|
"loss": 0.6387, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 9.315538586392796, |
|
"learning_rate": 1.3287729059127288e-05, |
|
"loss": 0.5793, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 7.330889488527192, |
|
"learning_rate": 1.3252552947032755e-05, |
|
"loss": 0.5955, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 5.492945122022015, |
|
"learning_rate": 1.3217331770005639e-05, |
|
"loss": 0.4936, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 6.447759334210931, |
|
"learning_rate": 1.3182066016044135e-05, |
|
"loss": 0.6306, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 10.58800825139178, |
|
"learning_rate": 1.3146756173764061e-05, |
|
"loss": 0.682, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 11.358838931871405, |
|
"learning_rate": 1.3111402732392098e-05, |
|
"loss": 0.7864, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 8.567212453461783, |
|
"learning_rate": 1.3076006181758989e-05, |
|
"loss": 0.5594, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 8.546126016623386, |
|
"learning_rate": 1.3040567012292779e-05, |
|
"loss": 0.6419, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"grad_norm": 11.957735628493818, |
|
"learning_rate": 1.3005085715012003e-05, |
|
"loss": 0.632, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 10.122315964153882, |
|
"learning_rate": 1.2969562781518885e-05, |
|
"loss": 0.6758, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 10.274234862260917, |
|
"learning_rate": 1.2933998703992531e-05, |
|
"loss": 0.4632, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 9.56291976312044, |
|
"learning_rate": 1.2898393975182113e-05, |
|
"loss": 0.7572, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 11.309501354950024, |
|
"learning_rate": 1.2862749088400026e-05, |
|
"loss": 0.5429, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.4111570184934195, |
|
"learning_rate": 1.2827064537515075e-05, |
|
"loss": 0.5896, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 4.403745251090165, |
|
"learning_rate": 1.279134081694561e-05, |
|
"loss": 0.6544, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 5.2380739150735565, |
|
"learning_rate": 1.2755578421652696e-05, |
|
"loss": 0.4995, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.1969762293333135, |
|
"learning_rate": 1.2719777847133241e-05, |
|
"loss": 0.6669, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 6.009545288953141, |
|
"learning_rate": 1.2683939589413139e-05, |
|
"loss": 0.4661, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"grad_norm": 11.862108381435066, |
|
"learning_rate": 1.2648064145040392e-05, |
|
"loss": 0.5745, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 8.371435047081937, |
|
"learning_rate": 1.2612152011078233e-05, |
|
"loss": 0.581, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 6.570729807144181, |
|
"learning_rate": 1.2576203685098233e-05, |
|
"loss": 0.5283, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 15.891490498465696, |
|
"learning_rate": 1.2540219665173423e-05, |
|
"loss": 0.6488, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 11.348604428897819, |
|
"learning_rate": 1.2504200449871378e-05, |
|
"loss": 0.6554, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 10.1223452644342, |
|
"learning_rate": 1.2468146538247308e-05, |
|
"loss": 0.6271, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 14.257981744049157, |
|
"learning_rate": 1.2432058429837153e-05, |
|
"loss": 0.694, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 14.906690269642054, |
|
"learning_rate": 1.2395936624650664e-05, |
|
"loss": 0.6699, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 7.166307639896277, |
|
"learning_rate": 1.2359781623164465e-05, |
|
"loss": 0.5572, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"grad_norm": 13.74157282336694, |
|
"learning_rate": 1.2323593926315113e-05, |
|
"loss": 0.7576, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.280109663389997, |
|
"learning_rate": 1.2287374035492184e-05, |
|
"loss": 0.5046, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 10.090941451576859, |
|
"learning_rate": 1.2251122452531301e-05, |
|
"loss": 0.5604, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 5.903088605165492, |
|
"learning_rate": 1.2214839679707193e-05, |
|
"loss": 0.5624, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 10.167630601617299, |
|
"learning_rate": 1.2178526219726723e-05, |
|
"loss": 0.5295, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 8.331133592273517, |
|
"learning_rate": 1.2142182575721946e-05, |
|
"loss": 0.6144, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 9.705907066152204, |
|
"learning_rate": 1.2105809251243113e-05, |
|
"loss": 0.6389, |
|
"step": 470 |
|
}, |
|
{
"epoch": 0.5,
"grad_norm": 11.02758523623926,
"learning_rate": 1.2069406750251713e-05,
"loss": 0.666,
"step": 471
},
{
"epoch": 0.5,
"grad_norm": 7.447181404677012,
"learning_rate": 1.2032975577113474e-05,
"loss": 0.7216,
"step": 472
},
{
"epoch": 0.5,
"grad_norm": 7.658897343141703,
"learning_rate": 1.1996516236591398e-05,
"loss": 0.6087,
"step": 473
},
{
"epoch": 0.51,
"grad_norm": 4.187819572120313,
"learning_rate": 1.1960029233838737e-05,
"loss": 0.5356,
"step": 474
},
{
"epoch": 0.51,
"grad_norm": 5.7176090622427385,
"learning_rate": 1.1923515074392022e-05,
"loss": 0.619,
"step": 475
},
{
"epoch": 0.51,
"grad_norm": 6.823057157296125,
"learning_rate": 1.1886974264164037e-05,
"loss": 0.6037,
"step": 476
},
{
"epoch": 0.51,
"grad_norm": 11.059981117618824,
"learning_rate": 1.1850407309436831e-05,
"loss": 0.604,
"step": 477
},
{
"epoch": 0.51,
"grad_norm": 9.504143330556396,
"learning_rate": 1.181381471685468e-05,
"loss": 0.647,
"step": 478
},
{
"epoch": 0.51,
"grad_norm": 6.557755314159755,
"learning_rate": 1.1777196993417087e-05,
"loss": 0.4595,
"step": 479
},
{
"epoch": 0.51,
"grad_norm": 6.9247206639927095,
"learning_rate": 1.1740554646471742e-05,
"loss": 0.5228,
"step": 480
},
{
"epoch": 0.51,
"grad_norm": 10.232093839271188,
"learning_rate": 1.1703888183707513e-05,
"loss": 0.6062,
"step": 481
},
{
"epoch": 0.51,
"grad_norm": 6.510487958239591,
"learning_rate": 1.166719811314738e-05,
"loss": 0.5219,
"step": 482
},
{
"epoch": 0.51,
"grad_norm": 8.968572470896715,
"learning_rate": 1.1630484943141428e-05,
"loss": 0.5209,
"step": 483
},
{
"epoch": 0.52,
"grad_norm": 5.0494617841107265,
"learning_rate": 1.1593749182359788e-05,
"loss": 0.5948,
"step": 484
},
{
"epoch": 0.52,
"grad_norm": 11.340103876058729,
"learning_rate": 1.1556991339785595e-05,
"loss": 0.6203,
"step": 485
},
{
"epoch": 0.52,
"grad_norm": 10.146196176770191,
"learning_rate": 1.1520211924707919e-05,
"loss": 0.7785,
"step": 486
},
{
"epoch": 0.52,
"grad_norm": 4.43271626514912,
"learning_rate": 1.1483411446714744e-05,
"loss": 0.4862,
"step": 487
},
{
"epoch": 0.52,
"grad_norm": 7.824102062875491,
"learning_rate": 1.1446590415685863e-05,
"loss": 0.6113,
"step": 488
},
{
"epoch": 0.52,
"grad_norm": 6.336647655938098,
"learning_rate": 1.1409749341785859e-05,
"loss": 0.5343,
"step": 489
},
{
"epoch": 0.52,
"grad_norm": 4.855528076878574,
"learning_rate": 1.1372888735456988e-05,
"loss": 0.5014,
"step": 490
},
{
"epoch": 0.52,
"grad_norm": 8.244191689910387,
"learning_rate": 1.1336009107412162e-05,
"loss": 0.64,
"step": 491
},
{
"epoch": 0.52,
"grad_norm": 9.72186897530395,
"learning_rate": 1.1299110968627822e-05,
"loss": 0.5919,
"step": 492
},
{
"epoch": 0.53,
"grad_norm": 6.153546625651434,
"learning_rate": 1.1262194830336888e-05,
"loss": 0.4917,
"step": 493
},
{
"epoch": 0.53,
"grad_norm": 8.261625036086883,
"learning_rate": 1.1225261204021662e-05,
"loss": 0.5592,
"step": 494
},
{
"epoch": 0.53,
"grad_norm": 7.32183514687012,
"learning_rate": 1.118831060140676e-05,
"loss": 0.4822,
"step": 495
},
{
"epoch": 0.53,
"grad_norm": 6.309347895522231,
"learning_rate": 1.1151343534451995e-05,
"loss": 0.5727,
"step": 496
},
{
"epoch": 0.53,
"grad_norm": 7.8065524764950265,
"learning_rate": 1.1114360515345301e-05,
"loss": 0.6533,
"step": 497
},
{
"epoch": 0.53,
"grad_norm": 9.553172955626218,
"learning_rate": 1.107736205649564e-05,
"loss": 0.4895,
"step": 498
},
{
"epoch": 0.53,
"grad_norm": 8.353357382799153,
"learning_rate": 1.1040348670525889e-05,
"loss": 0.5254,
"step": 499
},
{
"epoch": 0.53,
"grad_norm": 11.74491323882519,
"learning_rate": 1.1003320870265741e-05,
"loss": 0.6004,
"step": 500
},
{
"epoch": 0.53,
"grad_norm": 3.4228605283872797,
"learning_rate": 1.096627916874461e-05,
"loss": 0.4572,
"step": 501
},
{
"epoch": 0.53,
"grad_norm": 11.366613402895425,
"learning_rate": 1.0929224079184514e-05,
"loss": 0.648,
"step": 502
},
{
"epoch": 0.54,
"grad_norm": 5.915082226142763,
"learning_rate": 1.0892156114992963e-05,
"loss": 0.5606,
"step": 503
},
{
"epoch": 0.54,
"grad_norm": 5.481987693582871,
"learning_rate": 1.0855075789755845e-05,
"loss": 0.5845,
"step": 504
},
{
"epoch": 0.54,
"grad_norm": 6.226052256005356,
"learning_rate": 1.0817983617230326e-05,
"loss": 0.5025,
"step": 505
},
{
"epoch": 0.54,
"grad_norm": 8.531553272430466,
"learning_rate": 1.0780880111337704e-05,
"loss": 0.5657,
"step": 506
},
{
"epoch": 0.54,
"grad_norm": 6.998952295190077,
"learning_rate": 1.0743765786156313e-05,
"loss": 0.7092,
"step": 507
},
{
"epoch": 0.54,
"grad_norm": 6.513406245121362,
"learning_rate": 1.0706641155914384e-05,
"loss": 0.548,
"step": 508
},
{
"epoch": 0.54,
"grad_norm": 8.48581353131954,
"learning_rate": 1.066950673498294e-05,
"loss": 0.5551,
"step": 509
},
{
"epoch": 0.54,
"grad_norm": 5.34639942444931,
"learning_rate": 1.0632363037868638e-05,
"loss": 0.4334,
"step": 510
},
{
"epoch": 0.54,
"grad_norm": 11.269718113412772,
"learning_rate": 1.0595210579206676e-05,
"loss": 0.5803,
"step": 511
},
{
"epoch": 0.55,
"grad_norm": 11.41953037054744,
"learning_rate": 1.0558049873753637e-05,
"loss": 0.5186,
"step": 512
},
{
"epoch": 0.55,
"grad_norm": 11.07638179235055,
"learning_rate": 1.0520881436380366e-05,
"loss": 0.6146,
"step": 513
},
{
"epoch": 0.55,
"grad_norm": 11.357643871564994,
"learning_rate": 1.0483705782064835e-05,
"loss": 0.4825,
"step": 514
},
{
"epoch": 0.55,
"grad_norm": 8.12832951837023,
"learning_rate": 1.0446523425885008e-05,
"loss": 0.5972,
"step": 515
},
{
"epoch": 0.55,
"grad_norm": 4.573339318696573,
"learning_rate": 1.040933488301171e-05,
"loss": 0.4511,
"step": 516
},
{
"epoch": 0.55,
"grad_norm": 7.488115795627891,
"learning_rate": 1.0372140668701483e-05,
"loss": 0.5612,
"step": 517
},
{
"epoch": 0.55,
"grad_norm": 4.416399464656011,
"learning_rate": 1.0334941298289441e-05,
"loss": 0.5678,
"step": 518
},
{
"epoch": 0.55,
"grad_norm": 8.472247115461284,
"learning_rate": 1.0297737287182144e-05,
"loss": 0.5397,
"step": 519
},
{
"epoch": 0.55,
"grad_norm": 7.278212668194716,
"learning_rate": 1.026052915085045e-05,
"loss": 0.5597,
"step": 520
},
{
"epoch": 0.56,
"grad_norm": 5.04716478079048,
"learning_rate": 1.022331740482237e-05,
"loss": 0.5014,
"step": 521
},
{
"epoch": 0.56,
"grad_norm": 11.570053771414813,
"learning_rate": 1.0186102564675928e-05,
"loss": 0.661,
"step": 522
},
{
"epoch": 0.56,
"grad_norm": 8.829505233768511,
"learning_rate": 1.014888514603202e-05,
"loss": 0.5232,
"step": 523
},
{
"epoch": 0.56,
"grad_norm": 9.542821950950582,
"learning_rate": 1.0111665664547267e-05,
"loss": 0.5823,
"step": 524
},
{
"epoch": 0.56,
"grad_norm": 4.63327242048301,
"learning_rate": 1.0074444635906875e-05,
"loss": 0.5625,
"step": 525
},
{
"epoch": 0.56,
"grad_norm": 6.365741719759197,
"learning_rate": 1.0037222575817476e-05,
"loss": 0.6587,
"step": 526
},
{
"epoch": 0.56,
"grad_norm": 8.488178372143695,
"learning_rate": 1e-05,
"loss": 0.5501,
"step": 527
},
{
"epoch": 0.56,
"grad_norm": 9.897138663353587,
"learning_rate": 9.962777424182527e-06,
"loss": 0.6661,
"step": 528
},
{
"epoch": 0.56,
"grad_norm": 10.569907059227898,
"learning_rate": 9.92555536409313e-06,
"loss": 0.5958,
"step": 529
},
{
"epoch": 0.56,
"grad_norm": 3.736421534561604,
"learning_rate": 9.888334335452734e-06,
"loss": 0.5303,
"step": 530
},
{
"epoch": 0.57,
"grad_norm": 8.836116584372025,
"learning_rate": 9.85111485396798e-06,
"loss": 0.6466,
"step": 531
},
{
"epoch": 0.57,
"grad_norm": 7.181117148918435,
"learning_rate": 9.813897435324075e-06,
"loss": 0.54,
"step": 532
},
{
"epoch": 0.57,
"grad_norm": 4.785898028898109,
"learning_rate": 9.776682595177633e-06,
"loss": 0.4985,
"step": 533
},
{
"epoch": 0.57,
"grad_norm": 11.140585777357792,
"learning_rate": 9.739470849149554e-06,
"loss": 0.6496,
"step": 534
},
{
"epoch": 0.57,
"grad_norm": 3.531792374263498,
"learning_rate": 9.702262712817857e-06,
"loss": 0.5364,
"step": 535
},
{
"epoch": 0.57,
"grad_norm": 6.601039058908546,
"learning_rate": 9.66505870171056e-06,
"loss": 0.5116,
"step": 536
},
{
"epoch": 0.57,
"grad_norm": 8.927040349981295,
"learning_rate": 9.627859331298522e-06,
"loss": 0.5219,
"step": 537
},
{
"epoch": 0.57,
"grad_norm": 6.1651706502501264,
"learning_rate": 9.590665116988293e-06,
"loss": 0.5608,
"step": 538
},
{
"epoch": 0.57,
"grad_norm": 9.745244839875141,
"learning_rate": 9.553476574114993e-06,
"loss": 0.5524,
"step": 539
},
{
"epoch": 0.58,
"grad_norm": 7.008580981680822,
"learning_rate": 9.51629421793517e-06,
"loss": 0.5662,
"step": 540
},
{
"epoch": 0.58,
"grad_norm": 7.687272495275157,
"learning_rate": 9.479118563619638e-06,
"loss": 0.5806,
"step": 541
},
{
"epoch": 0.58,
"grad_norm": 5.593165759891498,
"learning_rate": 9.441950126246365e-06,
"loss": 0.5447,
"step": 542
},
{
"epoch": 0.58,
"grad_norm": 4.518154487773691,
"learning_rate": 9.404789420793327e-06,
"loss": 0.5662,
"step": 543
},
{
"epoch": 0.58,
"grad_norm": 5.979882247197896,
"learning_rate": 9.367636962131364e-06,
"loss": 0.5499,
"step": 544
},
{
"epoch": 0.58,
"grad_norm": 10.248432776308547,
"learning_rate": 9.330493265017062e-06,
"loss": 0.6341,
"step": 545
},
{
"epoch": 0.58,
"grad_norm": 7.4913799071398275,
"learning_rate": 9.29335884408562e-06,
"loss": 0.4995,
"step": 546
},
{
"epoch": 0.58,
"grad_norm": 12.97254908941589,
"learning_rate": 9.25623421384369e-06,
"loss": 0.5575,
"step": 547
},
{
"epoch": 0.58,
"grad_norm": 7.7639027583981095,
"learning_rate": 9.2191198886623e-06,
"loss": 0.5819,
"step": 548
},
{
"epoch": 0.58,
"grad_norm": 7.6543085882238895,
"learning_rate": 9.182016382769678e-06,
"loss": 0.6647,
"step": 549
},
{
"epoch": 0.59,
"grad_norm": 5.7656791466080355,
"learning_rate": 9.144924210244155e-06,
"loss": 0.6249,
"step": 550
},
{
"epoch": 0.59,
"grad_norm": 5.2481708291921505,
"learning_rate": 9.107843885007042e-06,
"loss": 0.5478,
"step": 551
},
{
"epoch": 0.59,
"grad_norm": 10.21885595901045,
"learning_rate": 9.070775920815488e-06,
"loss": 0.5803,
"step": 552
},
{
"epoch": 0.59,
"grad_norm": 7.389626516947618,
"learning_rate": 9.033720831255391e-06,
"loss": 0.5334,
"step": 553
},
{
"epoch": 0.59,
"grad_norm": 11.910444324111724,
"learning_rate": 8.996679129734266e-06,
"loss": 0.5788,
"step": 554
},
{
"epoch": 0.59,
"grad_norm": 10.70692641976889,
"learning_rate": 8.959651329474115e-06,
"loss": 0.5155,
"step": 555
},
{
"epoch": 0.59,
"grad_norm": 5.6372805638926,
"learning_rate": 8.922637943504362e-06,
"loss": 0.5881,
"step": 556
},
{
"epoch": 0.59,
"grad_norm": 9.8644472116661,
"learning_rate": 8.8856394846547e-06,
"loss": 0.5072,
"step": 557
},
{
"epoch": 0.59,
"grad_norm": 11.356856786195184,
"learning_rate": 8.848656465548007e-06,
"loss": 0.549,
"step": 558
},
{
"epoch": 0.6,
"grad_norm": 11.998300599364015,
"learning_rate": 8.811689398593245e-06,
"loss": 0.5355,
"step": 559
},
{
"epoch": 0.6,
"grad_norm": 5.815584273089378,
"learning_rate": 8.774738795978341e-06,
"loss": 0.4673,
"step": 560
},
{
"epoch": 0.6,
"grad_norm": 9.210932501903986,
"learning_rate": 8.737805169663113e-06,
"loss": 0.5137,
"step": 561
},
{
"epoch": 0.6,
"grad_norm": 10.604624719913076,
"learning_rate": 8.700889031372183e-06,
"loss": 0.6092,
"step": 562
},
{
"epoch": 0.6,
"grad_norm": 6.733937738438429,
"learning_rate": 8.663990892587839e-06,
"loss": 0.5617,
"step": 563
},
{
"epoch": 0.6,
"grad_norm": 6.687819879267525,
"learning_rate": 8.627111264543012e-06,
"loss": 0.4319,
"step": 564
},
{
"epoch": 0.6,
"grad_norm": 11.209429725910061,
"learning_rate": 8.590250658214148e-06,
"loss": 0.5174,
"step": 565
},
{
"epoch": 0.6,
"grad_norm": 5.337704061513362,
"learning_rate": 8.553409584314138e-06,
"loss": 0.4295,
"step": 566
},
{
"epoch": 0.6,
"grad_norm": 9.126368763120784,
"learning_rate": 8.516588553285258e-06,
"loss": 0.4436,
"step": 567
},
{
"epoch": 0.61,
"grad_norm": 10.856129898989279,
"learning_rate": 8.479788075292083e-06,
"loss": 0.6126,
"step": 568
},
{
"epoch": 0.61,
"grad_norm": 5.163627248000245,
"learning_rate": 8.443008660214409e-06,
"loss": 0.5352,
"step": 569
},
{
"epoch": 0.61,
"grad_norm": 6.372067670711978,
"learning_rate": 8.406250817640212e-06,
"loss": 0.5076,
"step": 570
},
{
"epoch": 0.61,
"grad_norm": 6.426831912031028,
"learning_rate": 8.369515056858575e-06,
"loss": 0.6041,
"step": 571
},
{
"epoch": 0.61,
"grad_norm": 6.985038214862641,
"learning_rate": 8.332801886852624e-06,
"loss": 0.6586,
"step": 572
},
{
"epoch": 0.61,
"grad_norm": 6.593624344866879,
"learning_rate": 8.296111816292494e-06,
"loss": 0.6751,
"step": 573
},
{
"epoch": 0.61,
"grad_norm": 7.554153671309798,
"learning_rate": 8.259445353528261e-06,
"loss": 0.5276,
"step": 574
},
{
"epoch": 0.61,
"grad_norm": 9.224509289941837,
"learning_rate": 8.222803006582915e-06,
"loss": 0.5993,
"step": 575
},
{
"epoch": 0.61,
"grad_norm": 16.50239364012574,
"learning_rate": 8.186185283145325e-06,
"loss": 0.6259,
"step": 576
},
{
"epoch": 0.61,
"grad_norm": 15.120105685997352,
"learning_rate": 8.149592690563172e-06,
"loss": 0.6467,
"step": 577
},
{
"epoch": 0.62,
"grad_norm": 7.220612802422961,
"learning_rate": 8.113025735835965e-06,
"loss": 0.5715,
"step": 578
},
{
"epoch": 0.62,
"grad_norm": 8.562863959435413,
"learning_rate": 8.076484925607983e-06,
"loss": 0.5466,
"step": 579
},
{
"epoch": 0.62,
"grad_norm": 12.490548007030851,
"learning_rate": 8.039970766161264e-06,
"loss": 0.5514,
"step": 580
},
{
"epoch": 0.62,
"grad_norm": 6.5845388402784435,
"learning_rate": 8.003483763408604e-06,
"loss": 0.5687,
"step": 581
},
{
"epoch": 0.62,
"grad_norm": 8.50284284365726,
"learning_rate": 7.967024422886528e-06,
"loss": 0.4796,
"step": 582
},
{
"epoch": 0.62,
"grad_norm": 7.038271102438535,
"learning_rate": 7.930593249748289e-06,
"loss": 0.5228,
"step": 583
},
{
"epoch": 0.62,
"grad_norm": 11.889834227941325,
"learning_rate": 7.894190748756892e-06,
"loss": 0.4577,
"step": 584
},
{
"epoch": 0.62,
"grad_norm": 6.711588374541616,
"learning_rate": 7.857817424278056e-06,
"loss": 0.498,
"step": 585
},
{
"epoch": 0.62,
"grad_norm": 7.966852766279544,
"learning_rate": 7.821473780273279e-06,
"loss": 0.5579,
"step": 586
},
{
"epoch": 0.63,
"grad_norm": 8.417280128263915,
"learning_rate": 7.785160320292812e-06,
"loss": 0.4942,
"step": 587
},
{
"epoch": 0.63,
"grad_norm": 4.894439384156987,
"learning_rate": 7.7488775474687e-06,
"loss": 0.5082,
"step": 588
},
{
"epoch": 0.63,
"grad_norm": 7.960251253611578,
"learning_rate": 7.712625964507818e-06,
"loss": 0.4997,
"step": 589
},
{
"epoch": 0.63,
"grad_norm": 8.640093158656311,
"learning_rate": 7.676406073684892e-06,
"loss": 0.5362,
"step": 590
},
{
"epoch": 0.63,
"grad_norm": 11.82405754186769,
"learning_rate": 7.64021837683554e-06,
"loss": 0.5292,
"step": 591
},
{
"epoch": 0.63,
"grad_norm": 5.258983750312472,
"learning_rate": 7.604063375349338e-06,
"loss": 0.547,
"step": 592
},
{
"epoch": 0.63,
"grad_norm": 10.013781925100409,
"learning_rate": 7.567941570162849e-06,
"loss": 0.538,
"step": 593
},
{
"epoch": 0.63,
"grad_norm": 7.8051606786973755,
"learning_rate": 7.531853461752696e-06,
"loss": 0.5122,
"step": 594
},
{
"epoch": 0.63,
"grad_norm": 7.2586272669558145,
"learning_rate": 7.495799550128625e-06,
"loss": 0.4468,
"step": 595
},
{
"epoch": 0.64,
"grad_norm": 5.774720107719125,
"learning_rate": 7.459780334826578e-06,
"loss": 0.5275,
"step": 596
},
{
"epoch": 0.64,
"grad_norm": 7.511213422739932,
"learning_rate": 7.423796314901769e-06,
"loss": 0.5768,
"step": 597
},
{
"epoch": 0.64,
"grad_norm": 13.06441655084729,
"learning_rate": 7.387847988921772e-06,
"loss": 0.6067,
"step": 598
},
{
"epoch": 0.64,
"grad_norm": 9.818511674108413,
"learning_rate": 7.351935854959608e-06,
"loss": 0.5669,
"step": 599
},
{
"epoch": 0.64,
"grad_norm": 12.085632705436097,
"learning_rate": 7.31606041058686e-06,
"loss": 0.6871,
"step": 600
},
{
"epoch": 0.64,
"grad_norm": 6.460535141416251,
"learning_rate": 7.2802221528667604e-06,
"loss": 0.4964,
"step": 601
},
{
"epoch": 0.64,
"grad_norm": 3.7229407170275715,
"learning_rate": 7.244421578347307e-06,
"loss": 0.4216,
"step": 602
},
{
"epoch": 0.64,
"grad_norm": 8.837577427746787,
"learning_rate": 7.208659183054393e-06,
"loss": 0.5951,
"step": 603
},
{
"epoch": 0.64,
"grad_norm": 4.340949956633562,
"learning_rate": 7.172935462484931e-06,
"loss": 0.3946,
"step": 604
},
{
"epoch": 0.64,
"grad_norm": 5.8235051717682405,
"learning_rate": 7.137250911599978e-06,
"loss": 0.4795,
"step": 605
},
{
"epoch": 0.65,
"grad_norm": 5.498783950116057,
"learning_rate": 7.101606024817889e-06,
"loss": 0.5192,
"step": 606
},
{
"epoch": 0.65,
"grad_norm": 3.1814150008692534,
"learning_rate": 7.066001296007469e-06,
"loss": 0.3841,
"step": 607
},
{
"epoch": 0.65,
"grad_norm": 5.5757391334979225,
"learning_rate": 7.030437218481116e-06,
"loss": 0.4353,
"step": 608
},
{
"epoch": 0.65,
"grad_norm": 13.254243576587063,
"learning_rate": 6.9949142849880015e-06,
"loss": 0.5673,
"step": 609
},
{
"epoch": 0.65,
"grad_norm": 4.1536903593774515,
"learning_rate": 6.959432987707223e-06,
"loss": 0.4192,
"step": 610
},
{
"epoch": 0.65,
"grad_norm": 3.4786455475133287,
"learning_rate": 6.9239938182410126e-06,
"loss": 0.4249,
"step": 611
},
{
"epoch": 0.65,
"grad_norm": 7.5677310862475915,
"learning_rate": 6.888597267607906e-06,
"loss": 0.4949,
"step": 612
},
{
"epoch": 0.65,
"grad_norm": 8.368860704109643,
"learning_rate": 6.8532438262359404e-06,
"loss": 0.4154,
"step": 613
},
{
"epoch": 0.65,
"grad_norm": 5.058418452049737,
"learning_rate": 6.817933983955867e-06,
"loss": 0.4951,
"step": 614
},
{
"epoch": 0.66,
"grad_norm": 6.42488252810342,
"learning_rate": 6.7826682299943635e-06,
"loss": 0.5948,
"step": 615
},
{
"epoch": 0.66,
"grad_norm": 7.090667184322379,
"learning_rate": 6.747447052967246e-06,
"loss": 0.564,
"step": 616
},
{
"epoch": 0.66,
"grad_norm": 9.915670648118702,
"learning_rate": 6.712270940872713e-06,
"loss": 0.4821,
"step": 617
},
{
"epoch": 0.66,
"grad_norm": 10.251562644163112,
"learning_rate": 6.677140381084573e-06,
"loss": 0.5258,
"step": 618
},
{
"epoch": 0.66,
"grad_norm": 5.692081829738655,
"learning_rate": 6.642055860345494e-06,
"loss": 0.4832,
"step": 619
},
{
"epoch": 0.66,
"grad_norm": 4.7513407845962545,
"learning_rate": 6.607017864760265e-06,
"loss": 0.4669,
"step": 620
},
{
"epoch": 0.66,
"grad_norm": 7.302874981115006,
"learning_rate": 6.572026879789064e-06,
"loss": 0.4492,
"step": 621
},
{
"epoch": 0.66,
"grad_norm": 13.730001115703725,
"learning_rate": 6.537083390240706e-06,
"loss": 0.5786,
"step": 622
},
{
"epoch": 0.66,
"grad_norm": 5.826147970750949,
"learning_rate": 6.502187880265969e-06,
"loss": 0.3817,
"step": 623
},
{
"epoch": 0.66,
"grad_norm": 8.854083264240913,
"learning_rate": 6.467340833350837e-06,
"loss": 0.4938,
"step": 624
},
{
"epoch": 0.67,
"grad_norm": 5.711323962217592,
"learning_rate": 6.43254273230985e-06,
"loss": 0.4934,
"step": 625
},
{
"epoch": 0.67,
"grad_norm": 6.916231992838763,
"learning_rate": 6.3977940592793766e-06,
"loss": 0.5225,
"step": 626
},
{
"epoch": 0.67,
"grad_norm": 9.605613693440635,
"learning_rate": 6.36309529571095e-06,
"loss": 0.527,
"step": 627
},
{
"epoch": 0.67,
"grad_norm": 7.226506391255898,
"learning_rate": 6.328446922364595e-06,
"loss": 0.4771,
"step": 628
},
{
"epoch": 0.67,
"grad_norm": 5.211969056714009,
"learning_rate": 6.293849419302179e-06,
"loss": 0.5099,
"step": 629
},
{
"epoch": 0.67,
"grad_norm": 11.660847177274526,
"learning_rate": 6.259303265880725e-06,
"loss": 0.5479,
"step": 630
},
{
"epoch": 0.67,
"grad_norm": 8.043560392139083,
"learning_rate": 6.224808940745814e-06,
"loss": 0.5182,
"step": 631
},
{
"epoch": 0.67,
"grad_norm": 8.052748988432281,
"learning_rate": 6.190366921824933e-06,
"loss": 0.6176,
"step": 632
},
{
"epoch": 0.67,
"grad_norm": 7.108683500222242,
"learning_rate": 6.155977686320837e-06,
"loss": 0.5878,
"step": 633
},
{
"epoch": 0.68,
"grad_norm": 11.823467597819771,
"learning_rate": 6.121641710704972e-06,
"loss": 0.4971,
"step": 634
},
{
"epoch": 0.68,
"grad_norm": 10.40651530100889,
"learning_rate": 6.087359470710841e-06,
"loss": 0.4427,
"step": 635
},
{
"epoch": 0.68,
"grad_norm": 11.976145453386437,
"learning_rate": 6.053131441327431e-06,
"loss": 0.5098,
"step": 636
},
{
"epoch": 0.68,
"grad_norm": 7.971563751964657,
"learning_rate": 6.018958096792642e-06,
"loss": 0.5695,
"step": 637
},
{
"epoch": 0.68,
"grad_norm": 6.129677385908451,
"learning_rate": 5.98483991058667e-06,
"loss": 0.4808,
"step": 638
},
{
"epoch": 0.68,
"grad_norm": 4.479658413986526,
"learning_rate": 5.950777355425511e-06,
"loss": 0.4775,
"step": 639
},
{
"epoch": 0.68,
"grad_norm": 4.309077539414612,
"learning_rate": 5.916770903254363e-06,
"loss": 0.4718,
"step": 640
},
{
"epoch": 0.68,
"grad_norm": 5.726658232065661,
"learning_rate": 5.8828210252411e-06,
"loss": 0.5147,
"step": 641
},
{
"epoch": 0.68,
"grad_norm": 6.432897045805533,
"learning_rate": 5.848928191769753e-06,
"loss": 0.5047,
"step": 642
},
{
"epoch": 0.69,
"grad_norm": 6.06082150623531,
"learning_rate": 5.815092872433994e-06,
"loss": 0.5296,
"step": 643
},
{
"epoch": 0.69,
"grad_norm": 7.760932921939526,
"learning_rate": 5.781315536030607e-06,
"loss": 0.5339,
"step": 644
},
{
"epoch": 0.69,
"grad_norm": 6.0611679377908825,
"learning_rate": 5.74759665055302e-06,
"loss": 0.4212,
"step": 645
},
{
"epoch": 0.69,
"grad_norm": 6.4875374667111565,
"learning_rate": 5.713936683184795e-06,
"loss": 0.5394,
"step": 646
},
{
"epoch": 0.69,
"grad_norm": 5.850168689232539,
"learning_rate": 5.680336100293182e-06,
"loss": 0.4512,
"step": 647
},
{
"epoch": 0.69,
"grad_norm": 9.444171703118819,
"learning_rate": 5.6467953674226505e-06,
"loss": 0.3942,
"step": 648
},
{
"epoch": 0.69,
"grad_norm": 8.582206095093797,
"learning_rate": 5.613314949288409e-06,
"loss": 0.4582,
"step": 649
},
{
"epoch": 0.69,
"grad_norm": 11.757390924671101,
"learning_rate": 5.579895309770009e-06,
"loss": 0.5435,
"step": 650
},
{
"epoch": 0.69,
"grad_norm": 5.02260927644731,
"learning_rate": 5.546536911904896e-06,
"loss": 0.4719,
"step": 651
},
{
"epoch": 0.69,
"grad_norm": 9.22536236771837,
"learning_rate": 5.513240217881992e-06,
"loss": 0.4668,
"step": 652
},
{
"epoch": 0.7,
"grad_norm": 10.157555843180285,
"learning_rate": 5.4800056890353025e-06,
"loss": 0.5409,
"step": 653
},
{
"epoch": 0.7,
"grad_norm": 12.323174404868091,
"learning_rate": 5.44683378583752e-06,
"loss": 0.5631,
"step": 654
},
{
"epoch": 0.7,
"grad_norm": 8.185618668543789,
"learning_rate": 5.4137249678936265e-06,
"loss": 0.4284,
"step": 655
},
{
"epoch": 0.7,
"grad_norm": 8.516855530899237,
"learning_rate": 5.380679693934569e-06,
"loss": 0.4221,
"step": 656
},
{
"epoch": 0.7,
"grad_norm": 11.008555199751408,
"learning_rate": 5.347698421810861e-06,
"loss": 0.5924,
"step": 657
},
{
"epoch": 0.7,
"grad_norm": 15.331344015670625,
"learning_rate": 5.31478160848625e-06,
"loss": 0.642,
"step": 658
},
{
"epoch": 0.7,
"grad_norm": 10.747880055046071,
"learning_rate": 5.2819297100314e-06,
"loss": 0.5417,
"step": 659
},
{
"epoch": 0.7,
"grad_norm": 15.482764407878273,
"learning_rate": 5.249143181617562e-06,
"loss": 0.63,
"step": 660
},
{
"epoch": 0.7,
"grad_norm": 4.174929188215974,
"learning_rate": 5.216422477510267e-06,
"loss": 0.4989,
"step": 661
},
{
"epoch": 0.71,
"grad_norm": 3.810136037355045,
"learning_rate": 5.183768051063036e-06,
"loss": 0.4893,
"step": 662
},
{
"epoch": 0.71,
"grad_norm": 6.8676100081227744,
"learning_rate": 5.151180354711087e-06,
"loss": 0.5768,
"step": 663
},
{
"epoch": 0.71,
"grad_norm": 10.044174931284044,
"learning_rate": 5.11865983996509e-06,
"loss": 0.4937,
"step": 664
},
{
"epoch": 0.71,
"grad_norm": 5.716263993029701,
"learning_rate": 5.0862069574048956e-06,
"loss": 0.4918,
"step": 665
},
{
"epoch": 0.71,
"grad_norm": 5.327730475446738,
"learning_rate": 5.053822156673276e-06,
"loss": 0.5112,
"step": 666
},
{
"epoch": 0.71,
"grad_norm": 9.70768865377853,
"learning_rate": 5.021505886469733e-06,
"loss": 0.5682,
"step": 667
},
{
"epoch": 0.71,
"grad_norm": 7.449826876587748,
"learning_rate": 4.989258594544252e-06,
"loss": 0.5914,
"step": 668
},
{
"epoch": 0.71,
"grad_norm": 6.246979693998428,
"learning_rate": 4.957080727691107e-06,
"loss": 0.5022,
"step": 669
},
{
"epoch": 0.71,
"grad_norm": 6.234169934241622,
"learning_rate": 4.9249727317426765e-06,
"loss": 0.5086,
"step": 670
},
{
"epoch": 0.71,
"grad_norm": 8.795959544866905,
"learning_rate": 4.892935051563243e-06,
"loss": 0.5631,
"step": 671
},
{
"epoch": 0.72,
"grad_norm": 6.585824557633543,
"learning_rate": 4.86096813104287e-06,
"loss": 0.5387,
"step": 672
},
{
"epoch": 0.72,
"grad_norm": 9.412528445371356,
"learning_rate": 4.829072413091219e-06,
"loss": 0.5903,
"step": 673
},
{
"epoch": 0.72,
"grad_norm": 4.934030658656449,
"learning_rate": 4.797248339631411e-06,
"loss": 0.5905,
"step": 674
},
{
"epoch": 0.72,
"grad_norm": 7.225794451726558,
"learning_rate": 4.765496351593927e-06,
"loss": 0.595,
"step": 675
},
{
"epoch": 0.72,
"grad_norm": 10.387771035955092,
"learning_rate": 4.733816888910483e-06,
"loss": 0.4901,
"step": 676
},
{
"epoch": 0.72,
"grad_norm": 5.585297554337063,
"learning_rate": 4.7022103905079405e-06,
"loss": 0.3997,
"step": 677
},
{
"epoch": 0.72,
"grad_norm": 5.2187286296388375,
"learning_rate": 4.67067729430222e-06,
"loss": 0.4856,
"step": 678
},
{
"epoch": 0.72,
"grad_norm": 3.4821810592954296,
"learning_rate": 4.639218037192235e-06,
"loss": 0.4361,
"step": 679
},
{
"epoch": 0.72,
"grad_norm": 3.959673908934417,
"learning_rate": 4.607833055053843e-06,
"loss": 0.3775,
"step": 680
},
{
"epoch": 0.73,
"grad_norm": 5.129251200576683,
"learning_rate": 4.576522782733802e-06,
"loss": 0.4947,
"step": 681
},
{
"epoch": 0.73,
"grad_norm": 4.8501294404892,
"learning_rate": 4.545287654043751e-06,
"loss": 0.3936,
"step": 682
},
{
"epoch": 0.73,
"grad_norm": 5.318966235176163,
"learning_rate": 4.514128101754183e-06,
"loss": 0.4649,
"step": 683
},
{
"epoch": 0.73,
"grad_norm": 7.407733161742192,
"learning_rate": 4.483044557588473e-06,
"loss": 0.5102,
"step": 684
},
{
"epoch": 0.73,
"grad_norm": 4.965311089759532,
"learning_rate": 4.45203745221688e-06,
"loss": 0.457,
"step": 685
},
{
"epoch": 0.73,
"grad_norm": 7.5517122869625135,
"learning_rate": 4.4211072152505866e-06,
"loss": 0.436,
"step": 686
},
{
"epoch": 0.73,
"grad_norm": 5.082245336395077,
"learning_rate": 4.3902542752357415e-06,
"loss": 0.4936,
"step": 687
},
{
"epoch": 0.73,
"grad_norm": 9.843843848674208,
"learning_rate": 4.359479059647527e-06,
"loss": 0.4336,
"step": 688
},
{
"epoch": 0.73,
"grad_norm": 5.231213013499434,
"learning_rate": 4.3287819948842334e-06,
"loss": 0.4247,
"step": 689
},
{
"epoch": 0.74,
"grad_norm": 6.305006534689214,
"learning_rate": 4.2981635062613556e-06,
"loss": 0.4826,
"step": 690
},
{
"epoch": 0.74,
"grad_norm": 5.533371229509814,
"learning_rate": 4.267624018005686e-06,
"loss": 0.4353,
"step": 691
},
{
"epoch": 0.74,
"grad_norm": 7.108534233713205,
"learning_rate": 4.237163953249453e-06,
"loss": 0.5991,
"step": 692
},
{
"epoch": 0.74,
"grad_norm": 4.76045650818351,
"learning_rate": 4.206783734024463e-06,
"loss": 0.4514,
"step": 693
},
{
"epoch": 0.74,
"grad_norm": 8.886091309499616,
"learning_rate": 4.176483781256224e-06,
"loss": 0.4023,
"step": 694
},
{
"epoch": 0.74,
"grad_norm": 7.524320382507302,
"learning_rate": 4.1462645147581456e-06,
"loss": 0.4514,
"step": 695
},
{
"epoch": 0.74,
"grad_norm": 7.482440196178911,
"learning_rate": 4.116126353225703e-06,
"loss": 0.5668,
"step": 696
},
{
"epoch": 0.74,
"grad_norm": 9.764506225206542,
"learning_rate": 4.086069714230646e-06,
"loss": 0.539,
"step": 697
},
{
"epoch": 0.74,
"grad_norm": 8.336719846814763,
"learning_rate": 4.056095014215208e-06,
"loss": 0.6426,
"step": 698
},
{
"epoch": 0.74,
"grad_norm": 4.148296296213888,
"learning_rate": 4.0262026684863295e-06,
"loss": 0.3376,
"step": 699
},
{
"epoch": 0.75,
"grad_norm": 9.203116559209336,
"learning_rate": 3.99639309120992e-06,
"loss": 0.5199,
"step": 700
},
{
"epoch": 0.75,
"grad_norm": 4.576118300820756,
"learning_rate": 3.96666669540512e-06,
"loss": 0.3731,
"step": 701
},
{
"epoch": 0.75,
"grad_norm": 9.971893856018951,
"learning_rate": 3.937023892938549e-06,
"loss": 0.591,
"step": 702
},
{
"epoch": 0.75,
"grad_norm": 5.809308796580302,
"learning_rate": 3.907465094518636e-06,
"loss": 0.5063,
"step": 703
},
{
"epoch": 0.75,
"grad_norm": 5.798443674221982,
"learning_rate": 3.877990709689905e-06,
"loss": 0.462,
"step": 704
},
{
"epoch": 0.75,
"grad_norm": 9.114741488810273,
"learning_rate": 3.8486011468273145e-06,
"loss": 0.5972,
"step": 705
},
{
"epoch": 0.75,
"grad_norm": 10.35491156365141,
"learning_rate": 3.819296813130588e-06,
"loss": 0.46,
"step": 706
},
{
"epoch": 0.75,
"grad_norm": 8.13256163586952,
"learning_rate": 3.790078114618586e-06,
"loss": 0.46,
"step": 707
},
{
"epoch": 0.75,
"grad_norm": 8.953602041543364,
"learning_rate": 3.760945456123656e-06,
"loss": 0.5296,
"step": 708
},
{
"epoch": 0.76,
"grad_norm": 6.830059498599811,
"learning_rate": 3.731899241286061e-06,
"loss": 0.4798,
"step": 709
},
{
"epoch": 0.76,
"grad_norm": 8.333772044542284,
"learning_rate": 3.702939872548348e-06,
"loss": 0.5452,
"step": 710
},
{
"epoch": 0.76,
"grad_norm": 7.7830518181331785,
"learning_rate": 3.6740677511497958e-06,
"loss": 0.52,
"step": 711
},
{
"epoch": 0.76,
"grad_norm": 9.48244358766429,
"learning_rate": 3.6452832771208524e-06,
"loss": 0.4604,
"step": 712
},
{
"epoch": 0.76,
"grad_norm": 5.325755301496669,
"learning_rate": 3.616586849277587e-06,
"loss": 0.4801,
"step": 713
},
{
"epoch": 0.76,
"grad_norm": 7.883051056212922,
"learning_rate": 3.587978865216165e-06,
"loss": 0.4396,
"step": 714
},
{
"epoch": 0.76,
"grad_norm": 7.231451053733015,
"learning_rate": 3.559459721307349e-06,
"loss": 0.5395,
"step": 715
},
{
"epoch": 0.76,
"grad_norm": 7.736510298846521,
"learning_rate": 3.5310298126909814e-06,
"loss": 0.4596,
"step": 716
},
{
"epoch": 0.76,
"grad_norm": 5.986545004035281,
"learning_rate": 3.5026895332705504e-06,
"loss": 0.4593,
"step": 717
},
{
"epoch": 0.77,
"grad_norm": 4.438626475367621,
"learning_rate": 3.474439275707698e-06,
"loss": 0.397,
"step": 718
},
{
"epoch": 0.77,
"grad_norm": 9.623477478063533,
"learning_rate": 3.4462794314167846e-06,
"loss": 0.509,
"step": 719
},
{
"epoch": 0.77,
"grad_norm": 9.394298451398283,
"learning_rate": 3.418210390559481e-06,
"loss": 0.4663,
"step": 720
},
{
"epoch": 0.77,
"grad_norm": 9.315469015310178,
"learning_rate": 3.390232542039352e-06,
"loss": 0.5065,
"step": 721
},
{
"epoch": 0.77,
"grad_norm": 6.092259066777256,
"learning_rate": 3.3623462734964697e-06,
"loss": 0.4477,
"step": 722
},
{
"epoch": 0.77,
"grad_norm": 4.319138234974373,
"learning_rate": 3.3345519713020445e-06,
"loss": 0.3925,
"step": 723
},
{
"epoch": 0.77,
"grad_norm": 6.358450139297925,
"learning_rate": 3.3068500205530596e-06,
"loss": 0.4679,
"step": 724
},
{
"epoch": 0.77,
"grad_norm": 7.701373209390687,
"learning_rate": 3.2792408050669634e-06,
"loss": 0.409,
"step": 725
},
{
"epoch": 0.77,
"grad_norm": 9.55351492544124,
"learning_rate": 3.2517247073763237e-06,
"loss": 0.4771,
"step": 726
},
{
"epoch": 0.77,
"grad_norm": 4.7780971794647025,
"learning_rate": 3.2243021087235336e-06,
"loss": 0.4689,
"step": 727
},
{
"epoch": 0.78,
"grad_norm": 7.114890807561552,
"learning_rate": 3.196973389055542e-06,
"loss": 0.4797,
"step": 728
},
{
"epoch": 0.78,
"grad_norm": 7.072041698936169,
"learning_rate": 3.16973892701858e-06,
"loss": 0.5768,
"step": 729
},
{
"epoch": 0.78,
"grad_norm": 5.961275677943624,
"learning_rate": 3.142599099952912e-06,
"loss": 0.3881,
"step": 730
},
{
"epoch": 0.78,
"grad_norm": 4.06251591074102,
"learning_rate": 3.115554283887614e-06,
"loss": 0.375,
"step": 731
},
{
"epoch": 0.78,
"grad_norm": 7.326058092329529,
"learning_rate": 3.088604853535361e-06,
"loss": 0.5388,
"step": 732
},
{
"epoch": 0.78,
"grad_norm": 5.274081659887408,
"learning_rate": 3.0617511822872337e-06,
"loss": 0.4742,
"step": 733
},
{
"epoch": 0.78,
"grad_norm": 4.793423500693722,
"learning_rate": 3.03499364220755e-06,
"loss": 0.4056,
"step": 734
},
{
"epoch": 0.78,
"grad_norm": 5.269835865291424,
"learning_rate": 3.0083326040286977e-06,
"loss": 0.4332,
"step": 735
},
{
"epoch": 0.78,
"grad_norm": 6.413197977206429,
"learning_rate": 2.9817684371460155e-06,
"loss": 0.5452,
"step": 736
},
{
"epoch": 0.79,
"grad_norm": 8.581131926197953,
"learning_rate": 2.9553015096126638e-06,
"loss": 0.4393,
"step": 737
},
{
"epoch": 0.79,
"grad_norm": 4.5160115188573045,
"learning_rate": 2.9289321881345257e-06,
"loss": 0.4465,
"step": 738
},
{
"epoch": 0.79,
"grad_norm": 5.2592392037223235,
"learning_rate": 2.902660838065131e-06,
"loss": 0.5082,
"step": 739
},
{
"epoch": 0.79,
"grad_norm": 7.05269405170704,
"learning_rate": 2.8764878234005876e-06,
"loss": 0.372,
"step": 740
},
{
"epoch": 0.79,
"grad_norm": 5.274091473047936,
"learning_rate": 2.8504135067745463e-06,
"loss": 0.4121,
"step": 741
},
{
"epoch": 0.79,
"grad_norm": 7.3781029800169655,
"learning_rate": 2.8244382494531675e-06,
"loss": 0.4067,
"step": 742
},
{
"epoch": 0.79,
"grad_norm": 8.32474239381458,
"learning_rate": 2.798562411330126e-06,
"loss": 0.3679,
"step": 743
},
{
"epoch": 0.79,
"grad_norm": 12.450031335391019,
"learning_rate": 2.772786350921608e-06,
"loss": 0.5521,
"step": 744
},
{
"epoch": 0.79,
"grad_norm": 7.249666510251534,
"learning_rate": 2.7471104253613645e-06,
"loss": 0.3938,
"step": 745
},
{
"epoch": 0.79,
"grad_norm": 7.2873098383670065,
"learning_rate": 2.721534990395752e-06,
"loss": 0.4884,
"step": 746
},
{
"epoch": 0.8,
"grad_norm": 5.438711289922824,
"learning_rate": 2.6960604003788014e-06,
"loss": 0.364,
"step": 747
},
{
"epoch": 0.8,
"grad_norm": 9.79198662580905,
"learning_rate": 2.670687008267313e-06,
"loss": 0.3911,
"step": 748
},
{
"epoch": 0.8,
"grad_norm": 11.420021502949318,
"learning_rate": 2.6454151656159666e-06,
"loss": 0.4454,
"step": 749
},
{
"epoch": 0.8,
"grad_norm": 8.204149460627178,
"learning_rate": 2.6202452225724463e-06,
"loss": 0.4276,
"step": 750
},
{
"epoch": 0.8,
"grad_norm": 10.999756173661007,
"learning_rate": 2.5951775278725956e-06,
"loss": 0.5623,
"step": 751
},
{
"epoch": 0.8,
"grad_norm": 8.568105116698392,
"learning_rate": 2.5702124288355723e-06,
"loss": 0.3292,
"step": 752
},
{
"epoch": 0.8,
"grad_norm": 5.56439861064704,
"learning_rate": 2.545350271359055e-06,
"loss": 0.3879,
"step": 753
},
{
"epoch": 0.8,
"grad_norm": 8.106162701050645,
"learning_rate": 2.520591399914444e-06,
"loss": 0.364,
"step": 754
},
{
"epoch": 0.8,
"grad_norm": 10.29859188596712,
"learning_rate": 2.495936157542074e-06,
"loss": 0.4791,
"step": 755
},
{
"epoch": 0.81,
"grad_norm": 7.657220905887133,
"learning_rate": 2.471384885846482e-06,
"loss": 0.4902,
"step": 756
},
{
"epoch": 0.81,
"grad_norm": 6.191995160106613,
"learning_rate": 2.4469379249916614e-06,
"loss": 0.4585,
"step": 757
},
{
"epoch": 0.81,
"grad_norm": 7.623947016373425,
"learning_rate": 2.422595613696356e-06,
"loss": 0.42,
"step": 758
},
{
"epoch": 0.81,
"grad_norm": 5.456113345068306,
"learning_rate": 2.3983582892293642e-06,
"loss": 0.3625,
"step": 759
},
{
"epoch": 0.81,
"grad_norm": 9.722994068985187,
"learning_rate": 2.3742262874048583e-06,
"loss": 0.5455,
"step": 760
},
{
"epoch": 0.81,
"grad_norm": 10.210377059221097,
"learning_rate": 2.3501999425777433e-06,
"loss": 0.4177,
"step": 761
},
{
"epoch": 0.81,
"grad_norm": 9.986453075138304,
"learning_rate": 2.326279587639031e-06,
"loss": 0.4519,
"step": 762
},
{
"epoch": 0.81,
"grad_norm": 7.444568050178271,
"learning_rate": 2.3024655540111984e-06,
"loss": 0.4707,
"step": 763
},
{
"epoch": 0.81,
"grad_norm": 6.882963377525352,
"learning_rate": 2.2787581716436257e-06,
"loss": 0.4142,
"step": 764
},
{
"epoch": 0.82,
"grad_norm": 5.621837753619931,
"learning_rate": 2.255157769008011e-06,
"loss": 0.3837,
"step": 765
},
{
"epoch": 0.82,
"grad_norm": 8.979013360736435,
"learning_rate": 2.2316646730938197e-06,
"loss": 0.4511,
"step": 766
},
{
"epoch": 0.82,
"grad_norm": 9.387346843179332,
"learning_rate": 2.2082792094037585e-06,
"loss": 0.3837,
"step": 767
},
{
"epoch": 0.82,
"grad_norm": 11.152697788037019,
"learning_rate": 2.185001701949263e-06,
"loss": 0.3815,
"step": 768
},
{
"epoch": 0.82,
"grad_norm": 7.316279559405673,
"learning_rate": 2.1618324732459993e-06,
"loss": 0.5763,
"step": 769
},
{
"epoch": 0.82,
"grad_norm": 4.60864460318581,
"learning_rate": 2.138771844309423e-06,
"loss": 0.3784,
"step": 770
},
{
"epoch": 0.82,
"grad_norm": 8.893733826091104,
"learning_rate": 2.1158201346502927e-06,
"loss": 0.3993,
"step": 771
},
{
"epoch": 0.82,
"grad_norm": 8.857140540604258,
"learning_rate": 2.092977662270275e-06,
"loss": 0.3334,
"step": 772
},
{
"epoch": 0.82,
"grad_norm": 9.000582629375513,
"learning_rate": 2.0702447436575223e-06,
"loss": 0.5377,
"step": 773
},
{
"epoch": 0.82,
"grad_norm": 11.407276883996886,
"learning_rate": 2.0476216937822967e-06,
"loss": 0.4657,
"step": 774
},
{
"epoch": 0.83,
"grad_norm": 9.343576283040107,
"learning_rate": 2.0251088260925967e-06,
"loss": 0.5038,
"step": 775
},
{
"epoch": 0.83,
"grad_norm": 5.742436296561597,
"learning_rate": 2.002706452509824e-06,
"loss": 0.462,
"step": 776
},
{
"epoch": 0.83,
"grad_norm": 9.651440289066235,
"learning_rate": 1.9804148834244465e-06,
"loss": 0.4588,
"step": 777
},
{
"epoch": 0.83,
"grad_norm": 5.795036174327087,
"learning_rate": 1.9582344276917233e-06,
"loss": 0.5565,
"step": 778
},
{
"epoch": 0.83,
"grad_norm": 7.475417877764932,
"learning_rate": 1.9361653926274016e-06,
"loss": 0.3644,
"step": 779
},
{
"epoch": 0.83,
"grad_norm": 7.2305893277419235,
"learning_rate": 1.9142080840034647e-06,
"loss": 0.451,
"step": 780
},
{
"epoch": 0.83,
"grad_norm": 7.9485644539877365,
"learning_rate": 1.8923628060439037e-06,
"loss": 0.5415,
"step": 781
},
{
"epoch": 0.83,
"grad_norm": 7.339019736495735,
"learning_rate": 1.8706298614204942e-06,
"loss": 0.4258,
"step": 782
},
{
"epoch": 0.83,
"grad_norm": 6.229226290130147,
"learning_rate": 1.8490095512486072e-06,
"loss": 0.372,
"step": 783
},
{
"epoch": 0.84,
"grad_norm": 8.318686997660855,
"learning_rate": 1.8275021750830368e-06,
"loss": 0.4337,
"step": 784
},
{
"epoch": 0.84,
"grad_norm": 11.383779868869713,
"learning_rate": 1.8061080309138379e-06,
"loss": 0.5517,
"step": 785
},
{
"epoch": 0.84,
"grad_norm": 4.319009051803344,
"learning_rate": 1.7848274151622236e-06,
"loss": 0.4032,
"step": 786
},
{
"epoch": 0.84,
"grad_norm": 6.434125961202376,
"learning_rate": 1.7636606226764353e-06,
"loss": 0.4165,
"step": 787
},
{
"epoch": 0.84,
"grad_norm": 5.041886706286037,
"learning_rate": 1.742607946727658e-06,
"loss": 0.4621,
"step": 788
},
{
"epoch": 0.84,
"grad_norm": 6.183213056857235,
"learning_rate": 1.7216696790059718e-06,
"loss": 0.423,
"step": 789
},
{
"epoch": 0.84,
"grad_norm": 6.373680744920821,
"learning_rate": 1.7008461096162999e-06,
"loss": 0.5464,
"step": 790
},
{
"epoch": 0.84,
"grad_norm": 11.493069464165945,
"learning_rate": 1.6801375270743925e-06,
|
"loss": 0.4518, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 9.805183898852514, |
|
"learning_rate": 1.6595442183028266e-06, |
|
"loss": 0.4836, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"grad_norm": 8.05887231146552, |
|
"learning_rate": 1.6390664686270342e-06, |
|
"loss": 0.3711, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 6.975427069143952, |
|
"learning_rate": 1.6187045617713459e-06, |
|
"loss": 0.4149, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 7.272454723310934, |
|
"learning_rate": 1.5984587798550633e-06, |
|
"loss": 0.4357, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 6.941951052506783, |
|
"learning_rate": 1.578329403388541e-06, |
|
"loss": 0.3835, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 5.800438948527679, |
|
"learning_rate": 1.5583167112693153e-06, |
|
"loss": 0.4408, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 5.3480222782611175, |
|
"learning_rate": 1.5384209807782291e-06, |
|
"loss": 0.4553, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 5.909917855292465, |
|
"learning_rate": 1.518642487575591e-06, |
|
"loss": 0.497, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 7.683087880393552, |
|
"learning_rate": 1.49898150569736e-06, |
|
"loss": 0.4535, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 7.446327856244201, |
|
"learning_rate": 1.4794383075513453e-06, |
|
"loss": 0.3873, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"grad_norm": 6.360170134685136, |
|
"learning_rate": 1.460013163913433e-06, |
|
"loss": 0.5037, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 5.053653216562189, |
|
"learning_rate": 1.4407063439238333e-06, |
|
"loss": 0.4954, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 4.2483525961998945, |
|
"learning_rate": 1.421518115083359e-06, |
|
"loss": 0.3751, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 6.345969600489226, |
|
"learning_rate": 1.4024487432497013e-06, |
|
"loss": 0.4143, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 5.646370329937249, |
|
"learning_rate": 1.3834984926337657e-06, |
|
"loss": 0.3376, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 10.69680196625431, |
|
"learning_rate": 1.36466762579601e-06, |
|
"loss": 0.5439, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 9.34187811581496, |
|
"learning_rate": 1.3459564036427841e-06, |
|
"loss": 0.4761, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 8.322291418258063, |
|
"learning_rate": 1.3273650854227438e-06, |
|
"loss": 0.4699, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 6.089668194777211, |
|
"learning_rate": 1.3088939287232383e-06, |
|
"loss": 0.4135, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"grad_norm": 6.349508519106518, |
|
"learning_rate": 1.2905431894667552e-06, |
|
"loss": 0.4429, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 6.0067928128468955, |
|
"learning_rate": 1.272313121907366e-06, |
|
"loss": 0.5506, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.057193603204914, |
|
"learning_rate": 1.2542039786272008e-06, |
|
"loss": 0.4029, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 6.9264232438262, |
|
"learning_rate": 1.236216010532959e-06, |
|
"loss": 0.399, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 8.111117092935682, |
|
"learning_rate": 1.218349466852432e-06, |
|
"loss": 0.3969, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 12.617119783922481, |
|
"learning_rate": 1.200604595131033e-06, |
|
"loss": 0.3632, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.997880390654607, |
|
"learning_rate": 1.1829816412283912e-06, |
|
"loss": 0.3727, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 9.925641311100412, |
|
"learning_rate": 1.1654808493149283e-06, |
|
"loss": 0.4779, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 10.259671670493303, |
|
"learning_rate": 1.1481024618684821e-06, |
|
"loss": 0.4792, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 4.666467514007945, |
|
"learning_rate": 1.1308467196709505e-06, |
|
"loss": 0.3665, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"grad_norm": 5.8390766285836895, |
|
"learning_rate": 1.1137138618049403e-06, |
|
"loss": 0.4426, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.079596026020399, |
|
"learning_rate": 1.096704125650474e-06, |
|
"loss": 0.3858, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 6.355034616703483, |
|
"learning_rate": 1.079817746881696e-06, |
|
"loss": 0.4322, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 4.964202739872203, |
|
"learning_rate": 1.0630549594635896e-06, |
|
"loss": 0.4246, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 9.981500108734732, |
|
"learning_rate": 1.0464159956487596e-06, |
|
"loss": 0.4048, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 5.314444267142196, |
|
"learning_rate": 1.0299010859742009e-06, |
|
"loss": 0.4226, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 6.711068748283136, |
|
"learning_rate": 1.013510459258108e-06, |
|
"loss": 0.4626, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 6.87746096070976, |
|
"learning_rate": 9.972443425967026e-07, |
|
"loss": 0.4762, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 5.584995566992966, |
|
"learning_rate": 9.811029613610913e-07, |
|
"loss": 0.4638, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"grad_norm": 5.476555647757321, |
|
"learning_rate": 9.650865391941345e-07, |
|
"loss": 0.4174, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 5.334567697843729, |
|
"learning_rate": 9.491952980073604e-07, |
|
"loss": 0.3264, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.864733082511921, |
|
"learning_rate": 9.334294579778813e-07, |
|
"loss": 0.3613, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 8.345283693272092, |
|
"learning_rate": 9.177892375453413e-07, |
|
"loss": 0.5469, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 8.885940166363653, |
|
"learning_rate": 9.022748534088988e-07, |
|
"loss": 0.4455, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.55591370496932, |
|
"learning_rate": 8.86886520524216e-07, |
|
"loss": 0.4356, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.5174900881299465, |
|
"learning_rate": 8.716244521004846e-07, |
|
"loss": 0.4074, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.226452727128449, |
|
"learning_rate": 8.564888595974718e-07, |
|
"loss": 0.473, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 7.586424458860782, |
|
"learning_rate": 8.414799527225826e-07, |
|
"loss": 0.4757, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"grad_norm": 5.51882334743589, |
|
"learning_rate": 8.265979394279732e-07, |
|
"loss": 0.4272, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 6.945131528688248, |
|
"learning_rate": 8.118430259076493e-07, |
|
"loss": 0.3003, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 7.957372619224033, |
|
"learning_rate": 7.972154165946155e-07, |
|
"loss": 0.4485, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 7.203632243202649, |
|
"learning_rate": 7.827153141580501e-07, |
|
"loss": 0.384, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 5.973567270845765, |
|
"learning_rate": 7.683429195004932e-07, |
|
"loss": 0.415, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 7.742846323788447, |
|
"learning_rate": 7.540984317550548e-07, |
|
"loss": 0.4261, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 4.971511012508995, |
|
"learning_rate": 7.399820482826692e-07, |
|
"loss": 0.3073, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 7.417662373162329, |
|
"learning_rate": 7.25993964669347e-07, |
|
"loss": 0.4293, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 8.474162845843916, |
|
"learning_rate": 7.12134374723481e-07, |
|
"loss": 0.5288, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 6.74629233498178, |
|
"learning_rate": 6.984034704731479e-07, |
|
"loss": 0.379, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"grad_norm": 6.51287661591934, |
|
"learning_rate": 6.848014421634497e-07, |
|
"loss": 0.4254, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 8.203119397161476, |
|
"learning_rate": 6.71328478253881e-07, |
|
"loss": 0.4818, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 7.091765460672564, |
|
"learning_rate": 6.579847654157234e-07, |
|
"loss": 0.3937, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.848337659332669, |
|
"learning_rate": 6.447704885294437e-07, |
|
"loss": 0.3223, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 11.861790191542282, |
|
"learning_rate": 6.316858306821449e-07, |
|
"loss": 0.3955, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.89823677782504, |
|
"learning_rate": 6.18730973165027e-07, |
|
"loss": 0.52, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 7.297372429370729, |
|
"learning_rate": 6.05906095470874e-07, |
|
"loss": 0.5625, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 8.018266432717773, |
|
"learning_rate": 5.932113752915659e-07, |
|
"loss": 0.5943, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 5.441022930966068, |
|
"learning_rate": 5.806469885156163e-07, |
|
"loss": 0.3584, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"grad_norm": 7.923651656498159, |
|
"learning_rate": 5.682131092257381e-07, |
|
"loss": 0.4374, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 4.401381730215806, |
|
"learning_rate": 5.55909909696436e-07, |
|
"loss": 0.4548, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 9.323436877274784, |
|
"learning_rate": 5.437375603916039e-07, |
|
"loss": 0.3949, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 7.6852951141865455, |
|
"learning_rate": 5.316962299621808e-07, |
|
"loss": 0.4357, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 5.468965570424503, |
|
"learning_rate": 5.19786085243803e-07, |
|
"loss": 0.4446, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 6.3812841292130855, |
|
"learning_rate": 5.080072912544987e-07, |
|
"loss": 0.4428, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 10.852851737536733, |
|
"learning_rate": 4.96360011192396e-07, |
|
"loss": 0.4991, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 5.7584503904522775, |
|
"learning_rate": 4.848444064334679e-07, |
|
"loss": 0.5114, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 6.376038086977472, |
|
"learning_rate": 4.734606365292871e-07, |
|
"loss": 0.3782, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 6.224991564333229, |
|
"learning_rate": 4.6220885920483014e-07, |
|
"loss": 0.3527, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"grad_norm": 5.203231071738429, |
|
"learning_rate": 4.510892303562797e-07, |
|
"loss": 0.3518, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.884197350261294, |
|
"learning_rate": 4.401019040488652e-07, |
|
"loss": 0.4493, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 8.438339641746209, |
|
"learning_rate": 4.2924703251473355e-07, |
|
"loss": 0.5192, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 9.929683884140545, |
|
"learning_rate": 4.1852476615083957e-07, |
|
"loss": 0.4769, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 7.444778653156325, |
|
"learning_rate": 4.0793525351685905e-07, |
|
"loss": 0.3915, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.55575801178513, |
|
"learning_rate": 3.974786413331311e-07, |
|
"loss": 0.3861, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.154553375207641, |
|
"learning_rate": 3.8715507447862455e-07, |
|
"loss": 0.3713, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 9.250721341718682, |
|
"learning_rate": 3.7696469598893727e-07, |
|
"loss": 0.4575, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.654967363493775, |
|
"learning_rate": 3.669076470543054e-07, |
|
"loss": 0.4686, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"grad_norm": 5.710919914401177, |
|
"learning_rate": 3.569840670176483e-07, |
|
"loss": 0.4339, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 6.7079973379018085, |
|
"learning_rate": 3.4719409337264473e-07, |
|
"loss": 0.4238, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 10.924750798758199, |
|
"learning_rate": 3.3753786176182303e-07, |
|
"loss": 0.4977, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 4.9608766209441315, |
|
"learning_rate": 3.2801550597468033e-07, |
|
"loss": 0.2732, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 7.448404330000091, |
|
"learning_rate": 3.186271579458333e-07, |
|
"loss": 0.4483, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 5.022846446335248, |
|
"learning_rate": 3.093729477531826e-07, |
|
"loss": 0.4363, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 7.140771043711367, |
|
"learning_rate": 3.002530036161222e-07, |
|
"loss": 0.4566, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 5.57307898414647, |
|
"learning_rate": 2.912674518937553e-07, |
|
"loss": 0.3565, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 7.503093403901029, |
|
"learning_rate": 2.824164170831389e-07, |
|
"loss": 0.3495, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"grad_norm": 8.266577699990709, |
|
"learning_rate": 2.737000218175712e-07, |
|
"loss": 0.458, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 8.007643327554275, |
|
"learning_rate": 2.651183868648821e-07, |
|
"loss": 0.3738, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 6.8532106769801215, |
|
"learning_rate": 2.566716311257655e-07, |
|
"loss": 0.4602, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 8.226940714871725, |
|
"learning_rate": 2.483598716321289e-07, |
|
"loss": 0.3939, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 7.35412581775661, |
|
"learning_rate": 2.4018322354547483e-07, |
|
"loss": 0.4912, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.189789718764145, |
|
"learning_rate": 2.321418001553022e-07, |
|
"loss": 0.3516, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.643929108838239, |
|
"learning_rate": 2.2423571287753853e-07, |
|
"loss": 0.3811, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.764646780403799, |
|
"learning_rate": 2.1646507125299587e-07, |
|
"loss": 0.4442, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.42774429745915, |
|
"learning_rate": 2.0882998294585177e-07, |
|
"loss": 0.3746, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 6.171794557047173, |
|
"learning_rate": 2.013305537421606e-07, |
|
"loss": 0.447, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"grad_norm": 5.130410450658874, |
|
"learning_rate": 1.9396688754838355e-07, |
|
"loss": 0.3469, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 7.39720002220979, |
|
"learning_rate": 1.867390863899543e-07, |
|
"loss": 0.3783, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 7.90865739350042, |
|
"learning_rate": 1.796472504098612e-07, |
|
"loss": 0.4422, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.815959678060295, |
|
"learning_rate": 1.726914778672606e-07, |
|
"loss": 0.378, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.081919878256831, |
|
"learning_rate": 1.6587186513611685e-07, |
|
"loss": 0.3205, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 6.567585188289477, |
|
"learning_rate": 1.5918850670386677e-07, |
|
"loss": 0.44, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 7.533656800692689, |
|
"learning_rate": 1.5264149517010718e-07, |
|
"loss": 0.4362, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.4628885270524865, |
|
"learning_rate": 1.4623092124531613e-07, |
|
"loss": 0.4497, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 5.858826602545033, |
|
"learning_rate": 1.399568737495971e-07, |
|
"loss": 0.43, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"grad_norm": 8.572231557747521, |
|
"learning_rate": 1.3381943961144118e-07, |
|
"loss": 0.3925, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 5.873954819161939, |
|
"learning_rate": 1.2781870386653016e-07, |
|
"loss": 0.3683, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 4.581416916538669, |
|
"learning_rate": 1.2195474965655652e-07, |
|
"loss": 0.2704, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 6.965146521509025, |
|
"learning_rate": 1.1622765822806636e-07, |
|
"loss": 0.3669, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 5.699569364212127, |
|
"learning_rate": 1.1063750893134273e-07, |
|
"loss": 0.4125, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 7.0422218435015855, |
|
"learning_rate": 1.0518437921929747e-07, |
|
"loss": 0.3156, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 9.329828931384606, |
|
"learning_rate": 9.986834464640328e-08, |
|
"loss": 0.4834, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 5.211053129225831, |
|
"learning_rate": 9.468947886764557e-08, |
|
"loss": 0.4188, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 5.70280316894895, |
|
"learning_rate": 8.964785363750228e-08, |
|
"loss": 0.3474, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 5.824204755244337, |
|
"learning_rate": 8.474353880894904e-08, |
|
"loss": 0.2739, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"grad_norm": 7.610004827598348, |
|
"learning_rate": 7.997660233249105e-08, |
|
"loss": 0.4407, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 6.558806283300632, |
|
"learning_rate": 7.534711025522168e-08, |
|
"loss": 0.3602, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 6.692057122360635, |
|
"learning_rate": 7.08551267199098e-08, |
|
"loss": 0.4439, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 6.4300501195085555, |
|
"learning_rate": 6.650071396410718e-08, |
|
"loss": 0.428, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 6.994677903035368, |
|
"learning_rate": 6.22839323192892e-08, |
|
"loss": 0.517, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 7.2674316715963325, |
|
"learning_rate": 5.820484021001993e-08, |
|
"loss": 0.3041, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 8.22191067382583, |
|
"learning_rate": 5.426349415313503e-08, |
|
"loss": 0.4481, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 5.065088639546076, |
|
"learning_rate": 5.0459948756967914e-08, |
|
"loss": 0.3333, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 8.073376899592944, |
|
"learning_rate": 4.679425672059035e-08, |
|
"loss": 0.4493, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"grad_norm": 6.784518754930513, |
|
"learning_rate": 4.326646883307972e-08, |
|
"loss": 0.4454, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 8.57495143340707, |
|
"learning_rate": 3.987663397281627e-08, |
|
"loss": 0.4892, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 5.72551957891776, |
|
"learning_rate": 3.662479910681027e-08, |
|
"loss": 0.4259, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 4.571968724650214, |
|
"learning_rate": 3.3511009290042585e-08, |
|
"loss": 0.364, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 7.563468099974791, |
|
"learning_rate": 3.0535307664848466e-08, |
|
"loss": 0.4774, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 6.8428676776213555, |
|
"learning_rate": 2.7697735460316954e-08, |
|
"loss": 0.4322, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 3.9770591980462786, |
|
"learning_rate": 2.499833199171908e-08, |
|
"loss": 0.3225, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 5.666940466332362, |
|
"learning_rate": 2.2437134659962777e-08, |
|
"loss": 0.4234, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 5.153046102603702, |
|
"learning_rate": 2.0014178951076602e-08, |
|
"loss": 0.471, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"grad_norm": 5.709388029240762, |
|
"learning_rate": 1.7729498435716808e-08, |
|
"loss": 0.4391, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 5.983125443983794, |
|
"learning_rate": 1.5583124768701052e-08, |
|
"loss": 0.483, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 6.445238846878224, |
|
"learning_rate": 1.3575087688570965e-08, |
|
"loss": 0.3558, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 6.816452865113282, |
|
"learning_rate": 1.1705415017183585e-08, |
|
"loss": 0.413, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 5.457722190574727, |
|
"learning_rate": 9.974132659319457e-09, |
|
"loss": 0.3282, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 6.969886154201284, |
|
"learning_rate": 8.381264602327354e-09, |
|
"loss": 0.438, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 938, |
|
"total_flos": 373627158528000.0, |
|
"train_loss": 0.7472518730773601, |
|
"train_runtime": 19046.2174, |
|
"train_samples_per_second": 6.306, |
|
"train_steps_per_second": 0.049 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 938, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 200, |
|
"total_flos": 373627158528000.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|