diff --git "a/checkpoint-1476/trainer_state.json" "b/checkpoint-1476/trainer_state.json" new file mode 100644--- /dev/null +++ "b/checkpoint-1476/trainer_state.json" @@ -0,0 +1,10365 @@ +{ + "best_metric": null, + "best_model_checkpoint": null, + "epoch": 2.001354096140826, + "eval_steps": 500, + "global_step": 1476, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.0013540961408259986, + "grad_norm": 2.4126635555318527, + "learning_rate": 2.5000000000000004e-07, + "loss": 3.1189, + "step": 1 + }, + { + "epoch": 0.002708192281651997, + "grad_norm": 2.8141112841232654, + "learning_rate": 5.000000000000001e-07, + "loss": 2.6861, + "step": 2 + }, + { + "epoch": 0.004062288422477996, + "grad_norm": 2.6738862039513274, + "learning_rate": 7.5e-07, + "loss": 3.3382, + "step": 3 + }, + { + "epoch": 0.005416384563303994, + "grad_norm": 2.30287728842049, + "learning_rate": 1.0000000000000002e-06, + "loss": 2.4565, + "step": 4 + }, + { + "epoch": 0.006770480704129994, + "grad_norm": 42.907191662846444, + "learning_rate": 1.25e-06, + "loss": 2.5327, + "step": 5 + }, + { + "epoch": 0.008124576844955992, + "grad_norm": 2.9404364361574724, + "learning_rate": 1.5e-06, + "loss": 2.7222, + "step": 6 + }, + { + "epoch": 0.009478672985781991, + "grad_norm": 1.9739382530413463, + "learning_rate": 1.75e-06, + "loss": 2.1649, + "step": 7 + }, + { + "epoch": 0.010832769126607989, + "grad_norm": 3.463912464916458, + "learning_rate": 2.0000000000000003e-06, + "loss": 2.9378, + "step": 8 + }, + { + "epoch": 0.012186865267433988, + "grad_norm": 2.207345459970691, + "learning_rate": 2.25e-06, + "loss": 3.6521, + "step": 9 + }, + { + "epoch": 0.013540961408259987, + "grad_norm": 2.752714935099265, + "learning_rate": 2.5e-06, + "loss": 2.5672, + "step": 10 + }, + { + "epoch": 0.014895057549085985, + "grad_norm": 2.7722408021236187, + "learning_rate": 2.7500000000000004e-06, + "loss": 3.0411, + "step": 11 + }, + { + "epoch": 0.016249153689911984, + "grad_norm": 2.9367100387488185, + "learning_rate": 3e-06, + "loss": 2.8985, + "step": 12 + }, + { + "epoch": 0.017603249830737983, + "grad_norm": 2.7601030286888797, + "learning_rate": 3.2500000000000002e-06, + "loss": 2.8232, + "step": 13 + }, + { + "epoch": 0.018957345971563982, + "grad_norm": 3.184703964873984, + "learning_rate": 3.5e-06, + "loss": 2.9731, + "step": 14 + }, + { + "epoch": 0.020311442112389978, + "grad_norm": 2.2952697179626105, + "learning_rate": 3.7500000000000005e-06, + "loss": 2.823, + "step": 15 + }, + { + "epoch": 0.021665538253215978, + "grad_norm": 2.4601547818230913, + "learning_rate": 4.000000000000001e-06, + "loss": 2.7683, + "step": 16 + }, + { + "epoch": 0.023019634394041977, + "grad_norm": 2.0169369154885217, + "learning_rate": 4.25e-06, + "loss": 2.566, + "step": 17 + }, + { + "epoch": 0.024373730534867976, + "grad_norm": 1.8874822039928005, + "learning_rate": 4.5e-06, + "loss": 2.4032, + "step": 18 + }, + { + "epoch": 0.025727826675693975, + "grad_norm": 1.4345346790759714, + "learning_rate": 4.75e-06, + "loss": 2.8099, + "step": 19 + }, + { + "epoch": 0.027081922816519974, + "grad_norm": 2.5317082125734025, + "learning_rate": 5e-06, + "loss": 2.4169, + "step": 20 + }, + { + "epoch": 0.02843601895734597, + "grad_norm": 1.4417852513317821, + "learning_rate": 5.2500000000000006e-06, + "loss": 3.0868, + "step": 21 + }, + { + "epoch": 0.02979011509817197, + "grad_norm": 0.9044314800944161, + "learning_rate": 5.500000000000001e-06, + "loss": 2.7374, + "step": 22 
+ }, + { + "epoch": 0.03114421123899797, + "grad_norm": 1.4184446315710213, + "learning_rate": 5.75e-06, + "loss": 2.5317, + "step": 23 + }, + { + "epoch": 0.03249830737982397, + "grad_norm": 1.6900469714019433, + "learning_rate": 6e-06, + "loss": 2.4404, + "step": 24 + }, + { + "epoch": 0.033852403520649964, + "grad_norm": 1.0886842936641719, + "learning_rate": 6.25e-06, + "loss": 2.8003, + "step": 25 + }, + { + "epoch": 0.035206499661475966, + "grad_norm": 1.463135149887311, + "learning_rate": 6.5000000000000004e-06, + "loss": 1.8819, + "step": 26 + }, + { + "epoch": 0.03656059580230196, + "grad_norm": 1.1222026842497739, + "learning_rate": 6.750000000000001e-06, + "loss": 2.2671, + "step": 27 + }, + { + "epoch": 0.037914691943127965, + "grad_norm": 1.0087919882501093, + "learning_rate": 7e-06, + "loss": 2.4491, + "step": 28 + }, + { + "epoch": 0.03926878808395396, + "grad_norm": 1.2761649114763567, + "learning_rate": 7.25e-06, + "loss": 2.7121, + "step": 29 + }, + { + "epoch": 0.040622884224779957, + "grad_norm": 1.0185872249128933, + "learning_rate": 7.500000000000001e-06, + "loss": 2.6497, + "step": 30 + }, + { + "epoch": 0.04197698036560596, + "grad_norm": 1.2789270965641044, + "learning_rate": 7.75e-06, + "loss": 2.668, + "step": 31 + }, + { + "epoch": 0.043331076506431955, + "grad_norm": 1.1115471485822677, + "learning_rate": 8.000000000000001e-06, + "loss": 2.6767, + "step": 32 + }, + { + "epoch": 0.04468517264725796, + "grad_norm": 1.0627476896958792, + "learning_rate": 8.25e-06, + "loss": 2.6262, + "step": 33 + }, + { + "epoch": 0.046039268788083954, + "grad_norm": 1.1212584420019067, + "learning_rate": 8.5e-06, + "loss": 2.4854, + "step": 34 + }, + { + "epoch": 0.04739336492890995, + "grad_norm": 0.714156466912055, + "learning_rate": 8.750000000000001e-06, + "loss": 2.3662, + "step": 35 + }, + { + "epoch": 0.04874746106973595, + "grad_norm": 1.4520911693345544, + "learning_rate": 9e-06, + "loss": 2.7126, + "step": 36 + }, + { + "epoch": 0.05010155721056195, + "grad_norm": 1.493660601298817, + "learning_rate": 9.250000000000001e-06, + "loss": 3.0131, + "step": 37 + }, + { + "epoch": 0.05145565335138795, + "grad_norm": 1.2721754658047073, + "learning_rate": 9.5e-06, + "loss": 3.0784, + "step": 38 + }, + { + "epoch": 0.052809749492213946, + "grad_norm": 0.8803541178840371, + "learning_rate": 9.75e-06, + "loss": 2.1007, + "step": 39 + }, + { + "epoch": 0.05416384563303995, + "grad_norm": 2.0655028083700504, + "learning_rate": 1e-05, + "loss": 2.5182, + "step": 40 + }, + { + "epoch": 0.055517941773865945, + "grad_norm": 1.0487372085128044, + "learning_rate": 9.999997090241333e-06, + "loss": 2.2771, + "step": 41 + }, + { + "epoch": 0.05687203791469194, + "grad_norm": 0.8160509238097648, + "learning_rate": 9.999988360968714e-06, + "loss": 2.2042, + "step": 42 + }, + { + "epoch": 0.05822613405551794, + "grad_norm": 0.8864211341294704, + "learning_rate": 9.999973812192306e-06, + "loss": 2.7951, + "step": 43 + }, + { + "epoch": 0.05958023019634394, + "grad_norm": 0.7681880276765591, + "learning_rate": 9.99995344392904e-06, + "loss": 2.859, + "step": 44 + }, + { + "epoch": 0.06093432633716994, + "grad_norm": 1.7239414196388092, + "learning_rate": 9.999927256202626e-06, + "loss": 2.7866, + "step": 45 + }, + { + "epoch": 0.06228842247799594, + "grad_norm": 0.7879562468694528, + "learning_rate": 9.999895249043542e-06, + "loss": 2.1671, + "step": 46 + }, + { + "epoch": 0.06364251861882193, + "grad_norm": 0.8376315627603867, + "learning_rate": 9.99985742248904e-06, + "loss": 2.6406, + "step": 
47 + }, + { + "epoch": 0.06499661475964794, + "grad_norm": 0.8839739394970294, + "learning_rate": 9.999813776583148e-06, + "loss": 2.3163, + "step": 48 + }, + { + "epoch": 0.06635071090047394, + "grad_norm": 0.7373151185194805, + "learning_rate": 9.999764311376664e-06, + "loss": 2.5735, + "step": 49 + }, + { + "epoch": 0.06770480704129993, + "grad_norm": 1.1273015474311354, + "learning_rate": 9.999709026927162e-06, + "loss": 2.2133, + "step": 50 + }, + { + "epoch": 0.06905890318212593, + "grad_norm": 0.8478718145728893, + "learning_rate": 9.99964792329899e-06, + "loss": 2.4075, + "step": 51 + }, + { + "epoch": 0.07041299932295193, + "grad_norm": 1.0667576833889847, + "learning_rate": 9.999581000563265e-06, + "loss": 2.6946, + "step": 52 + }, + { + "epoch": 0.07176709546377792, + "grad_norm": 1.0620013368810508, + "learning_rate": 9.999508258797876e-06, + "loss": 2.6205, + "step": 53 + }, + { + "epoch": 0.07312119160460392, + "grad_norm": 0.8072331151341536, + "learning_rate": 9.999429698087491e-06, + "loss": 2.5132, + "step": 54 + }, + { + "epoch": 0.07447528774542993, + "grad_norm": 0.6900996192892075, + "learning_rate": 9.999345318523544e-06, + "loss": 2.5071, + "step": 55 + }, + { + "epoch": 0.07582938388625593, + "grad_norm": 0.7145430930616551, + "learning_rate": 9.999255120204248e-06, + "loss": 2.6579, + "step": 56 + }, + { + "epoch": 0.07718348002708192, + "grad_norm": 0.9813015245466205, + "learning_rate": 9.999159103234582e-06, + "loss": 2.1667, + "step": 57 + }, + { + "epoch": 0.07853757616790792, + "grad_norm": 0.6801613421398072, + "learning_rate": 9.999057267726304e-06, + "loss": 2.2016, + "step": 58 + }, + { + "epoch": 0.07989167230873392, + "grad_norm": 1.340722366857769, + "learning_rate": 9.998949613797937e-06, + "loss": 2.5416, + "step": 59 + }, + { + "epoch": 0.08124576844955991, + "grad_norm": 0.8044848626001605, + "learning_rate": 9.998836141574781e-06, + "loss": 2.6258, + "step": 60 + }, + { + "epoch": 0.08259986459038592, + "grad_norm": 0.9118107600823588, + "learning_rate": 9.99871685118891e-06, + "loss": 2.5182, + "step": 61 + }, + { + "epoch": 0.08395396073121192, + "grad_norm": 0.8103939602292338, + "learning_rate": 9.99859174277916e-06, + "loss": 2.4597, + "step": 62 + }, + { + "epoch": 0.08530805687203792, + "grad_norm": 0.7953248414753887, + "learning_rate": 9.99846081649115e-06, + "loss": 2.6165, + "step": 63 + }, + { + "epoch": 0.08666215301286391, + "grad_norm": 0.6885480399283996, + "learning_rate": 9.998324072477266e-06, + "loss": 2.3687, + "step": 64 + }, + { + "epoch": 0.08801624915368991, + "grad_norm": 0.968337926868628, + "learning_rate": 9.99818151089666e-06, + "loss": 2.4591, + "step": 65 + }, + { + "epoch": 0.08937034529451592, + "grad_norm": 0.7154001784843524, + "learning_rate": 9.998033131915266e-06, + "loss": 2.2175, + "step": 66 + }, + { + "epoch": 0.0907244414353419, + "grad_norm": 1.18262426992356, + "learning_rate": 9.997878935705778e-06, + "loss": 3.1191, + "step": 67 + }, + { + "epoch": 0.09207853757616791, + "grad_norm": 0.7400521396635642, + "learning_rate": 9.997718922447669e-06, + "loss": 2.5125, + "step": 68 + }, + { + "epoch": 0.09343263371699391, + "grad_norm": 0.8148295313823943, + "learning_rate": 9.997553092327174e-06, + "loss": 2.7714, + "step": 69 + }, + { + "epoch": 0.0947867298578199, + "grad_norm": 0.7378483977717124, + "learning_rate": 9.997381445537309e-06, + "loss": 2.3631, + "step": 70 + }, + { + "epoch": 0.0961408259986459, + "grad_norm": 0.6266190246235469, + "learning_rate": 9.997203982277852e-06, + "loss": 2.4936, 
+ "step": 71 + }, + { + "epoch": 0.0974949221394719, + "grad_norm": 1.50407707220557, + "learning_rate": 9.997020702755353e-06, + "loss": 2.7048, + "step": 72 + }, + { + "epoch": 0.0988490182802979, + "grad_norm": 0.8300611098600416, + "learning_rate": 9.996831607183132e-06, + "loss": 2.7146, + "step": 73 + }, + { + "epoch": 0.1002031144211239, + "grad_norm": 0.7552213758897641, + "learning_rate": 9.996636695781276e-06, + "loss": 2.5399, + "step": 74 + }, + { + "epoch": 0.1015572105619499, + "grad_norm": 0.705291949087134, + "learning_rate": 9.996435968776646e-06, + "loss": 2.6843, + "step": 75 + }, + { + "epoch": 0.1029113067027759, + "grad_norm": 0.7673837145200612, + "learning_rate": 9.996229426402867e-06, + "loss": 2.7562, + "step": 76 + }, + { + "epoch": 0.10426540284360189, + "grad_norm": 0.7439456610505197, + "learning_rate": 9.996017068900335e-06, + "loss": 2.0488, + "step": 77 + }, + { + "epoch": 0.10561949898442789, + "grad_norm": 0.8945277132428896, + "learning_rate": 9.995798896516215e-06, + "loss": 2.48, + "step": 78 + }, + { + "epoch": 0.1069735951252539, + "grad_norm": 0.8901395084271732, + "learning_rate": 9.995574909504434e-06, + "loss": 2.6099, + "step": 79 + }, + { + "epoch": 0.1083276912660799, + "grad_norm": 0.824778949464918, + "learning_rate": 9.995345108125698e-06, + "loss": 2.69, + "step": 80 + }, + { + "epoch": 0.10968178740690589, + "grad_norm": 0.8965013093653279, + "learning_rate": 9.995109492647467e-06, + "loss": 1.8424, + "step": 81 + }, + { + "epoch": 0.11103588354773189, + "grad_norm": 1.0115160015541755, + "learning_rate": 9.99486806334398e-06, + "loss": 2.3693, + "step": 82 + }, + { + "epoch": 0.11238997968855789, + "grad_norm": 0.8624086155070334, + "learning_rate": 9.994620820496234e-06, + "loss": 2.6173, + "step": 83 + }, + { + "epoch": 0.11374407582938388, + "grad_norm": 0.7454258690069137, + "learning_rate": 9.994367764391998e-06, + "loss": 2.6624, + "step": 84 + }, + { + "epoch": 0.11509817197020988, + "grad_norm": 1.0680963212316428, + "learning_rate": 9.994108895325802e-06, + "loss": 2.478, + "step": 85 + }, + { + "epoch": 0.11645226811103589, + "grad_norm": 0.8212102801807549, + "learning_rate": 9.993844213598949e-06, + "loss": 2.0231, + "step": 86 + }, + { + "epoch": 0.11780636425186188, + "grad_norm": 1.0684938905029466, + "learning_rate": 9.993573719519498e-06, + "loss": 2.595, + "step": 87 + }, + { + "epoch": 0.11916046039268788, + "grad_norm": 0.9867071296775374, + "learning_rate": 9.993297413402282e-06, + "loss": 2.3636, + "step": 88 + }, + { + "epoch": 0.12051455653351388, + "grad_norm": 1.404157430750734, + "learning_rate": 9.993015295568893e-06, + "loss": 2.1992, + "step": 89 + }, + { + "epoch": 0.12186865267433988, + "grad_norm": 0.8693790763825426, + "learning_rate": 9.992727366347688e-06, + "loss": 2.7016, + "step": 90 + }, + { + "epoch": 0.12322274881516587, + "grad_norm": 0.7501248710359847, + "learning_rate": 9.99243362607379e-06, + "loss": 3.2797, + "step": 91 + }, + { + "epoch": 0.12457684495599188, + "grad_norm": 1.0643003422752406, + "learning_rate": 9.992134075089085e-06, + "loss": 2.5437, + "step": 92 + }, + { + "epoch": 0.12593094109681788, + "grad_norm": 0.9103830863991241, + "learning_rate": 9.991828713742218e-06, + "loss": 3.0088, + "step": 93 + }, + { + "epoch": 0.12728503723764387, + "grad_norm": 1.142388030489728, + "learning_rate": 9.991517542388605e-06, + "loss": 2.4977, + "step": 94 + }, + { + "epoch": 0.12863913337846988, + "grad_norm": 0.9920213403910143, + "learning_rate": 9.991200561390417e-06, + "loss": 2.5349, 
+ "step": 95 + }, + { + "epoch": 0.12999322951929587, + "grad_norm": 0.8981930618864583, + "learning_rate": 9.990877771116588e-06, + "loss": 2.8531, + "step": 96 + }, + { + "epoch": 0.13134732566012186, + "grad_norm": 1.154244133374351, + "learning_rate": 9.990549171942817e-06, + "loss": 2.5311, + "step": 97 + }, + { + "epoch": 0.13270142180094788, + "grad_norm": 0.7466294529414305, + "learning_rate": 9.99021476425156e-06, + "loss": 2.4112, + "step": 98 + }, + { + "epoch": 0.13405551794177387, + "grad_norm": 1.1851681560394296, + "learning_rate": 9.989874548432037e-06, + "loss": 2.2467, + "step": 99 + }, + { + "epoch": 0.13540961408259986, + "grad_norm": 1.295896953150764, + "learning_rate": 9.989528524880225e-06, + "loss": 2.5688, + "step": 100 + }, + { + "epoch": 0.13676371022342587, + "grad_norm": 0.8892799849200678, + "learning_rate": 9.989176693998863e-06, + "loss": 2.151, + "step": 101 + }, + { + "epoch": 0.13811780636425186, + "grad_norm": 1.133636165142649, + "learning_rate": 9.988819056197448e-06, + "loss": 1.8402, + "step": 102 + }, + { + "epoch": 0.13947190250507785, + "grad_norm": 0.8016376414959372, + "learning_rate": 9.988455611892237e-06, + "loss": 2.3469, + "step": 103 + }, + { + "epoch": 0.14082599864590387, + "grad_norm": 0.964428420697917, + "learning_rate": 9.98808636150624e-06, + "loss": 2.3062, + "step": 104 + }, + { + "epoch": 0.14218009478672985, + "grad_norm": 0.7711620898648806, + "learning_rate": 9.987711305469232e-06, + "loss": 2.7206, + "step": 105 + }, + { + "epoch": 0.14353419092755584, + "grad_norm": 0.7778439840699306, + "learning_rate": 9.987330444217739e-06, + "loss": 2.4899, + "step": 106 + }, + { + "epoch": 0.14488828706838186, + "grad_norm": 0.9642875586985667, + "learning_rate": 9.986943778195052e-06, + "loss": 2.8345, + "step": 107 + }, + { + "epoch": 0.14624238320920785, + "grad_norm": 0.8268272296488341, + "learning_rate": 9.98655130785121e-06, + "loss": 2.5926, + "step": 108 + }, + { + "epoch": 0.14759647935003387, + "grad_norm": 0.7977529748864721, + "learning_rate": 9.986153033643011e-06, + "loss": 1.9835, + "step": 109 + }, + { + "epoch": 0.14895057549085985, + "grad_norm": 1.140904527079228, + "learning_rate": 9.985748956034007e-06, + "loss": 2.2239, + "step": 110 + }, + { + "epoch": 0.15030467163168584, + "grad_norm": 1.020890884482194, + "learning_rate": 9.985339075494504e-06, + "loss": 2.6764, + "step": 111 + }, + { + "epoch": 0.15165876777251186, + "grad_norm": 0.8869762247213703, + "learning_rate": 9.984923392501567e-06, + "loss": 2.2497, + "step": 112 + }, + { + "epoch": 0.15301286391333785, + "grad_norm": 0.9590605445485386, + "learning_rate": 9.98450190753901e-06, + "loss": 2.3991, + "step": 113 + }, + { + "epoch": 0.15436696005416384, + "grad_norm": 1.27578790045338, + "learning_rate": 9.984074621097397e-06, + "loss": 2.3551, + "step": 114 + }, + { + "epoch": 0.15572105619498985, + "grad_norm": 1.0374791947621171, + "learning_rate": 9.983641533674053e-06, + "loss": 2.6919, + "step": 115 + }, + { + "epoch": 0.15707515233581584, + "grad_norm": 0.923336386037252, + "learning_rate": 9.983202645773049e-06, + "loss": 2.6477, + "step": 116 + }, + { + "epoch": 0.15842924847664183, + "grad_norm": 0.8526247474365115, + "learning_rate": 9.982757957905204e-06, + "loss": 2.2264, + "step": 117 + }, + { + "epoch": 0.15978334461746785, + "grad_norm": 1.1258963168949678, + "learning_rate": 9.982307470588097e-06, + "loss": 2.5224, + "step": 118 + }, + { + "epoch": 0.16113744075829384, + "grad_norm": 0.7569639815489783, + "learning_rate": 
9.98185118434605e-06, + "loss": 1.8855, + "step": 119 + }, + { + "epoch": 0.16249153689911983, + "grad_norm": 1.0355226629532543, + "learning_rate": 9.981389099710132e-06, + "loss": 3.0085, + "step": 120 + }, + { + "epoch": 0.16384563303994584, + "grad_norm": 1.1486309630139306, + "learning_rate": 9.980921217218173e-06, + "loss": 2.7178, + "step": 121 + }, + { + "epoch": 0.16519972918077183, + "grad_norm": 0.7466296304493713, + "learning_rate": 9.980447537414736e-06, + "loss": 2.251, + "step": 122 + }, + { + "epoch": 0.16655382532159782, + "grad_norm": 0.651170263411467, + "learning_rate": 9.979968060851144e-06, + "loss": 2.1519, + "step": 123 + }, + { + "epoch": 0.16790792146242384, + "grad_norm": 1.008261184550054, + "learning_rate": 9.979482788085455e-06, + "loss": 2.2199, + "step": 124 + }, + { + "epoch": 0.16926201760324983, + "grad_norm": 2.416142037739212, + "learning_rate": 9.978991719682486e-06, + "loss": 2.3314, + "step": 125 + }, + { + "epoch": 0.17061611374407584, + "grad_norm": 0.8174937699006882, + "learning_rate": 9.97849485621379e-06, + "loss": 2.5571, + "step": 126 + }, + { + "epoch": 0.17197020988490183, + "grad_norm": 1.0052455897777324, + "learning_rate": 9.977992198257668e-06, + "loss": 2.5827, + "step": 127 + }, + { + "epoch": 0.17332430602572782, + "grad_norm": 0.8663685595604006, + "learning_rate": 9.977483746399168e-06, + "loss": 2.5957, + "step": 128 + }, + { + "epoch": 0.17467840216655384, + "grad_norm": 0.8454778683126596, + "learning_rate": 9.976969501230074e-06, + "loss": 2.2387, + "step": 129 + }, + { + "epoch": 0.17603249830737983, + "grad_norm": 1.1589088141787116, + "learning_rate": 9.976449463348924e-06, + "loss": 2.721, + "step": 130 + }, + { + "epoch": 0.17738659444820581, + "grad_norm": 0.8357485092208772, + "learning_rate": 9.975923633360985e-06, + "loss": 2.1065, + "step": 131 + }, + { + "epoch": 0.17874069058903183, + "grad_norm": 1.0483474542727864, + "learning_rate": 9.975392011878278e-06, + "loss": 2.5342, + "step": 132 + }, + { + "epoch": 0.18009478672985782, + "grad_norm": 0.8310501434875607, + "learning_rate": 9.974854599519557e-06, + "loss": 2.9813, + "step": 133 + }, + { + "epoch": 0.1814488828706838, + "grad_norm": 1.1998035390299338, + "learning_rate": 9.974311396910317e-06, + "loss": 2.5218, + "step": 134 + }, + { + "epoch": 0.18280297901150983, + "grad_norm": 1.1877384820263168, + "learning_rate": 9.973762404682795e-06, + "loss": 3.0512, + "step": 135 + }, + { + "epoch": 0.18415707515233581, + "grad_norm": 0.7431690298915412, + "learning_rate": 9.973207623475964e-06, + "loss": 1.9473, + "step": 136 + }, + { + "epoch": 0.1855111712931618, + "grad_norm": 0.8085958372588746, + "learning_rate": 9.972647053935536e-06, + "loss": 2.2599, + "step": 137 + }, + { + "epoch": 0.18686526743398782, + "grad_norm": 1.0088420757147314, + "learning_rate": 9.972080696713962e-06, + "loss": 2.3532, + "step": 138 + }, + { + "epoch": 0.1882193635748138, + "grad_norm": 0.871056602633167, + "learning_rate": 9.971508552470424e-06, + "loss": 2.1344, + "step": 139 + }, + { + "epoch": 0.1895734597156398, + "grad_norm": 0.9551730724308998, + "learning_rate": 9.970930621870843e-06, + "loss": 1.7945, + "step": 140 + }, + { + "epoch": 0.1909275558564658, + "grad_norm": 0.9099059345338785, + "learning_rate": 9.970346905587875e-06, + "loss": 2.4697, + "step": 141 + }, + { + "epoch": 0.1922816519972918, + "grad_norm": 2.3602640235819288, + "learning_rate": 9.969757404300911e-06, + "loss": 2.3396, + "step": 142 + }, + { + "epoch": 0.19363574813811782, + "grad_norm": 
1.0387196911625636, + "learning_rate": 9.969162118696072e-06, + "loss": 2.2526, + "step": 143 + }, + { + "epoch": 0.1949898442789438, + "grad_norm": 1.0660871258756586, + "learning_rate": 9.968561049466214e-06, + "loss": 2.2518, + "step": 144 + }, + { + "epoch": 0.1963439404197698, + "grad_norm": 0.994954020536128, + "learning_rate": 9.967954197310922e-06, + "loss": 2.1365, + "step": 145 + }, + { + "epoch": 0.1976980365605958, + "grad_norm": 1.1172902304571317, + "learning_rate": 9.967341562936515e-06, + "loss": 2.4633, + "step": 146 + }, + { + "epoch": 0.1990521327014218, + "grad_norm": 0.8851246859608983, + "learning_rate": 9.966723147056036e-06, + "loss": 2.3302, + "step": 147 + }, + { + "epoch": 0.2004062288422478, + "grad_norm": 0.9673278398671098, + "learning_rate": 9.966098950389268e-06, + "loss": 2.3481, + "step": 148 + }, + { + "epoch": 0.2017603249830738, + "grad_norm": 1.0923553526656322, + "learning_rate": 9.965468973662712e-06, + "loss": 2.7291, + "step": 149 + }, + { + "epoch": 0.2031144211238998, + "grad_norm": 0.6885203668960329, + "learning_rate": 9.9648332176096e-06, + "loss": 2.4216, + "step": 150 + }, + { + "epoch": 0.20446851726472579, + "grad_norm": 0.8835456349684598, + "learning_rate": 9.964191682969891e-06, + "loss": 2.5524, + "step": 151 + }, + { + "epoch": 0.2058226134055518, + "grad_norm": 1.110246201094945, + "learning_rate": 9.96354437049027e-06, + "loss": 2.7307, + "step": 152 + }, + { + "epoch": 0.2071767095463778, + "grad_norm": 0.8335391435494296, + "learning_rate": 9.962891280924148e-06, + "loss": 2.2845, + "step": 153 + }, + { + "epoch": 0.20853080568720378, + "grad_norm": 0.7269532952637685, + "learning_rate": 9.962232415031653e-06, + "loss": 2.2107, + "step": 154 + }, + { + "epoch": 0.2098849018280298, + "grad_norm": 0.9516173733706272, + "learning_rate": 9.961567773579645e-06, + "loss": 2.549, + "step": 155 + }, + { + "epoch": 0.21123899796885579, + "grad_norm": 0.9192964960712486, + "learning_rate": 9.960897357341703e-06, + "loss": 2.382, + "step": 156 + }, + { + "epoch": 0.21259309410968177, + "grad_norm": 0.8697364214950628, + "learning_rate": 9.960221167098124e-06, + "loss": 2.7404, + "step": 157 + }, + { + "epoch": 0.2139471902505078, + "grad_norm": 0.854037317845471, + "learning_rate": 9.959539203635931e-06, + "loss": 2.2796, + "step": 158 + }, + { + "epoch": 0.21530128639133378, + "grad_norm": 9.059153514312463, + "learning_rate": 9.958851467748863e-06, + "loss": 2.1798, + "step": 159 + }, + { + "epoch": 0.2166553825321598, + "grad_norm": 0.9333843627998799, + "learning_rate": 9.958157960237376e-06, + "loss": 2.3693, + "step": 160 + }, + { + "epoch": 0.21800947867298578, + "grad_norm": 1.3475036120106114, + "learning_rate": 9.957458681908647e-06, + "loss": 2.4024, + "step": 161 + }, + { + "epoch": 0.21936357481381177, + "grad_norm": 1.2121402320200159, + "learning_rate": 9.956753633576571e-06, + "loss": 2.5439, + "step": 162 + }, + { + "epoch": 0.2207176709546378, + "grad_norm": 0.7792210046361225, + "learning_rate": 9.956042816061752e-06, + "loss": 2.0299, + "step": 163 + }, + { + "epoch": 0.22207176709546378, + "grad_norm": 0.8226985573354776, + "learning_rate": 9.955326230191517e-06, + "loss": 2.8253, + "step": 164 + }, + { + "epoch": 0.22342586323628977, + "grad_norm": 0.8424020935830455, + "learning_rate": 9.9546038767999e-06, + "loss": 2.2754, + "step": 165 + }, + { + "epoch": 0.22477995937711578, + "grad_norm": 1.1561344660760495, + "learning_rate": 9.95387575672765e-06, + "loss": 1.9543, + "step": 166 + }, + { + "epoch": 
0.22613405551794177, + "grad_norm": 0.8810904992087591, + "learning_rate": 9.953141870822232e-06, + "loss": 2.8316, + "step": 167 + }, + { + "epoch": 0.22748815165876776, + "grad_norm": 1.1327457989077157, + "learning_rate": 9.952402219937817e-06, + "loss": 2.1659, + "step": 168 + }, + { + "epoch": 0.22884224779959378, + "grad_norm": 0.847237764337931, + "learning_rate": 9.951656804935284e-06, + "loss": 2.5104, + "step": 169 + }, + { + "epoch": 0.23019634394041977, + "grad_norm": 1.3311662339401327, + "learning_rate": 9.950905626682229e-06, + "loss": 2.7411, + "step": 170 + }, + { + "epoch": 0.23155044008124576, + "grad_norm": 0.8754683872906716, + "learning_rate": 9.950148686052948e-06, + "loss": 2.2843, + "step": 171 + }, + { + "epoch": 0.23290453622207177, + "grad_norm": 1.1984097361442936, + "learning_rate": 9.949385983928446e-06, + "loss": 2.6191, + "step": 172 + }, + { + "epoch": 0.23425863236289776, + "grad_norm": 1.042317105372754, + "learning_rate": 9.948617521196438e-06, + "loss": 2.5479, + "step": 173 + }, + { + "epoch": 0.23561272850372375, + "grad_norm": 0.7787896671463191, + "learning_rate": 9.947843298751337e-06, + "loss": 2.3346, + "step": 174 + }, + { + "epoch": 0.23696682464454977, + "grad_norm": 1.2170235533278835, + "learning_rate": 9.947063317494265e-06, + "loss": 2.2332, + "step": 175 + }, + { + "epoch": 0.23832092078537576, + "grad_norm": 1.0390706514693488, + "learning_rate": 9.946277578333045e-06, + "loss": 2.5912, + "step": 176 + }, + { + "epoch": 0.23967501692620177, + "grad_norm": 1.4816304376017728, + "learning_rate": 9.945486082182201e-06, + "loss": 2.4719, + "step": 177 + }, + { + "epoch": 0.24102911306702776, + "grad_norm": 0.8487826972928669, + "learning_rate": 9.944688829962957e-06, + "loss": 2.2601, + "step": 178 + }, + { + "epoch": 0.24238320920785375, + "grad_norm": 1.0267911713585076, + "learning_rate": 9.94388582260324e-06, + "loss": 2.4376, + "step": 179 + }, + { + "epoch": 0.24373730534867977, + "grad_norm": 0.8526086813518827, + "learning_rate": 9.943077061037672e-06, + "loss": 2.6301, + "step": 180 + }, + { + "epoch": 0.24509140148950576, + "grad_norm": 1.356267698059979, + "learning_rate": 9.942262546207572e-06, + "loss": 2.723, + "step": 181 + }, + { + "epoch": 0.24644549763033174, + "grad_norm": 0.7426455232138849, + "learning_rate": 9.94144227906096e-06, + "loss": 2.4959, + "step": 182 + }, + { + "epoch": 0.24779959377115776, + "grad_norm": 0.8750438781520365, + "learning_rate": 9.940616260552545e-06, + "loss": 2.3425, + "step": 183 + }, + { + "epoch": 0.24915368991198375, + "grad_norm": 0.8785910140656594, + "learning_rate": 9.939784491643734e-06, + "loss": 2.2364, + "step": 184 + }, + { + "epoch": 0.25050778605280977, + "grad_norm": 0.8938618124220896, + "learning_rate": 9.938946973302624e-06, + "loss": 2.19, + "step": 185 + }, + { + "epoch": 0.25186188219363576, + "grad_norm": 1.021044776141451, + "learning_rate": 9.938103706504007e-06, + "loss": 2.7688, + "step": 186 + }, + { + "epoch": 0.25321597833446174, + "grad_norm": 1.2815667789992267, + "learning_rate": 9.937254692229363e-06, + "loss": 2.1036, + "step": 187 + }, + { + "epoch": 0.25457007447528773, + "grad_norm": 0.9597963383831872, + "learning_rate": 9.936399931466866e-06, + "loss": 2.7931, + "step": 188 + }, + { + "epoch": 0.2559241706161137, + "grad_norm": 1.1496492688566942, + "learning_rate": 9.935539425211371e-06, + "loss": 2.0287, + "step": 189 + }, + { + "epoch": 0.25727826675693977, + "grad_norm": 0.9029859331791751, + "learning_rate": 9.934673174464426e-06, + "loss": 2.555, 
+ "step": 190 + }, + { + "epoch": 0.25863236289776576, + "grad_norm": 0.8623354344497336, + "learning_rate": 9.933801180234263e-06, + "loss": 2.4571, + "step": 191 + }, + { + "epoch": 0.25998645903859174, + "grad_norm": 1.172827727876581, + "learning_rate": 9.932923443535798e-06, + "loss": 2.5339, + "step": 192 + }, + { + "epoch": 0.26134055517941773, + "grad_norm": 0.7844413148427782, + "learning_rate": 9.932039965390634e-06, + "loss": 2.232, + "step": 193 + }, + { + "epoch": 0.2626946513202437, + "grad_norm": 2.4674022908729993, + "learning_rate": 9.931150746827055e-06, + "loss": 2.4686, + "step": 194 + }, + { + "epoch": 0.2640487474610697, + "grad_norm": 0.8938856211560369, + "learning_rate": 9.930255788880021e-06, + "loss": 2.9519, + "step": 195 + }, + { + "epoch": 0.26540284360189575, + "grad_norm": 0.9967031966791289, + "learning_rate": 9.92935509259118e-06, + "loss": 2.3506, + "step": 196 + }, + { + "epoch": 0.26675693974272174, + "grad_norm": 0.8181579212619565, + "learning_rate": 9.928448659008856e-06, + "loss": 2.3992, + "step": 197 + }, + { + "epoch": 0.26811103588354773, + "grad_norm": 0.9019033329287899, + "learning_rate": 9.927536489188047e-06, + "loss": 1.8896, + "step": 198 + }, + { + "epoch": 0.2694651320243737, + "grad_norm": 1.196294290686767, + "learning_rate": 9.926618584190435e-06, + "loss": 2.6578, + "step": 199 + }, + { + "epoch": 0.2708192281651997, + "grad_norm": 0.8815838170577491, + "learning_rate": 9.925694945084369e-06, + "loss": 2.8371, + "step": 200 + }, + { + "epoch": 0.27217332430602575, + "grad_norm": 0.933774886972981, + "learning_rate": 9.924765572944879e-06, + "loss": 2.0347, + "step": 201 + }, + { + "epoch": 0.27352742044685174, + "grad_norm": 0.9459867483651119, + "learning_rate": 9.923830468853662e-06, + "loss": 2.3687, + "step": 202 + }, + { + "epoch": 0.27488151658767773, + "grad_norm": 0.8281062486347639, + "learning_rate": 9.92288963389909e-06, + "loss": 2.1651, + "step": 203 + }, + { + "epoch": 0.2762356127285037, + "grad_norm": 0.7955305725702541, + "learning_rate": 9.921943069176203e-06, + "loss": 2.5368, + "step": 204 + }, + { + "epoch": 0.2775897088693297, + "grad_norm": 1.026304391265509, + "learning_rate": 9.920990775786712e-06, + "loss": 2.9058, + "step": 205 + }, + { + "epoch": 0.2789438050101557, + "grad_norm": 0.9971825829470802, + "learning_rate": 9.920032754838994e-06, + "loss": 2.8694, + "step": 206 + }, + { + "epoch": 0.28029790115098174, + "grad_norm": 1.2484687747540484, + "learning_rate": 9.919069007448093e-06, + "loss": 2.4603, + "step": 207 + }, + { + "epoch": 0.28165199729180773, + "grad_norm": 0.864015784119935, + "learning_rate": 9.91809953473572e-06, + "loss": 2.2217, + "step": 208 + }, + { + "epoch": 0.2830060934326337, + "grad_norm": 0.9647460693812894, + "learning_rate": 9.917124337830242e-06, + "loss": 2.4799, + "step": 209 + }, + { + "epoch": 0.2843601895734597, + "grad_norm": 1.0623146158662309, + "learning_rate": 9.916143417866702e-06, + "loss": 2.3235, + "step": 210 + }, + { + "epoch": 0.2857142857142857, + "grad_norm": 1.5128627766738143, + "learning_rate": 9.915156775986789e-06, + "loss": 2.6927, + "step": 211 + }, + { + "epoch": 0.2870683818551117, + "grad_norm": 0.9157505604273191, + "learning_rate": 9.914164413338863e-06, + "loss": 2.056, + "step": 212 + }, + { + "epoch": 0.28842247799593773, + "grad_norm": 2.1672553239870114, + "learning_rate": 9.913166331077937e-06, + "loss": 2.3637, + "step": 213 + }, + { + "epoch": 0.2897765741367637, + "grad_norm": 0.8565503250451203, + "learning_rate": 
9.912162530365683e-06, + "loss": 2.3108, + "step": 214 + }, + { + "epoch": 0.2911306702775897, + "grad_norm": 0.8071561412028346, + "learning_rate": 9.911153012370427e-06, + "loss": 2.4094, + "step": 215 + }, + { + "epoch": 0.2924847664184157, + "grad_norm": 0.960350260627165, + "learning_rate": 9.910137778267153e-06, + "loss": 2.2326, + "step": 216 + }, + { + "epoch": 0.2938388625592417, + "grad_norm": 1.1434301374173532, + "learning_rate": 9.909116829237492e-06, + "loss": 2.1396, + "step": 217 + }, + { + "epoch": 0.29519295870006773, + "grad_norm": 1.0512962564460284, + "learning_rate": 9.908090166469733e-06, + "loss": 2.4862, + "step": 218 + }, + { + "epoch": 0.2965470548408937, + "grad_norm": 0.8803334872680652, + "learning_rate": 9.90705779115881e-06, + "loss": 2.3063, + "step": 219 + }, + { + "epoch": 0.2979011509817197, + "grad_norm": 0.7957632137271137, + "learning_rate": 9.90601970450631e-06, + "loss": 2.1209, + "step": 220 + }, + { + "epoch": 0.2992552471225457, + "grad_norm": 1.5559922150186727, + "learning_rate": 9.904975907720465e-06, + "loss": 2.745, + "step": 221 + }, + { + "epoch": 0.3006093432633717, + "grad_norm": 1.0287819888236789, + "learning_rate": 9.903926402016153e-06, + "loss": 2.3034, + "step": 222 + }, + { + "epoch": 0.3019634394041977, + "grad_norm": 0.8755596479469875, + "learning_rate": 9.902871188614898e-06, + "loss": 2.6008, + "step": 223 + }, + { + "epoch": 0.3033175355450237, + "grad_norm": 2.222977688536351, + "learning_rate": 9.901810268744868e-06, + "loss": 2.5897, + "step": 224 + }, + { + "epoch": 0.3046716316858497, + "grad_norm": 0.8294734280823934, + "learning_rate": 9.90074364364087e-06, + "loss": 2.373, + "step": 225 + }, + { + "epoch": 0.3060257278266757, + "grad_norm": 0.9955963385213202, + "learning_rate": 9.899671314544352e-06, + "loss": 2.8267, + "step": 226 + }, + { + "epoch": 0.3073798239675017, + "grad_norm": 0.9863487128858249, + "learning_rate": 9.898593282703402e-06, + "loss": 2.3585, + "step": 227 + }, + { + "epoch": 0.3087339201083277, + "grad_norm": 1.3476414208683485, + "learning_rate": 9.897509549372745e-06, + "loss": 2.0764, + "step": 228 + }, + { + "epoch": 0.31008801624915366, + "grad_norm": 1.1281976478830502, + "learning_rate": 9.896420115813741e-06, + "loss": 2.1232, + "step": 229 + }, + { + "epoch": 0.3114421123899797, + "grad_norm": 0.9905003772016358, + "learning_rate": 9.89532498329439e-06, + "loss": 2.0276, + "step": 230 + }, + { + "epoch": 0.3127962085308057, + "grad_norm": 0.9642354729606564, + "learning_rate": 9.894224153089313e-06, + "loss": 1.7903, + "step": 231 + }, + { + "epoch": 0.3141503046716317, + "grad_norm": 1.8609542881386758, + "learning_rate": 9.893117626479778e-06, + "loss": 2.6118, + "step": 232 + }, + { + "epoch": 0.3155044008124577, + "grad_norm": 1.0931602473722466, + "learning_rate": 9.892005404753669e-06, + "loss": 2.4775, + "step": 233 + }, + { + "epoch": 0.31685849695328366, + "grad_norm": 0.7800560012460497, + "learning_rate": 9.890887489205507e-06, + "loss": 2.1569, + "step": 234 + }, + { + "epoch": 0.3182125930941097, + "grad_norm": 1.150743273577776, + "learning_rate": 9.889763881136439e-06, + "loss": 2.4256, + "step": 235 + }, + { + "epoch": 0.3195666892349357, + "grad_norm": 0.9159836558576258, + "learning_rate": 9.888634581854235e-06, + "loss": 2.3495, + "step": 236 + }, + { + "epoch": 0.3209207853757617, + "grad_norm": 1.0174141433627475, + "learning_rate": 9.88749959267329e-06, + "loss": 2.8219, + "step": 237 + }, + { + "epoch": 0.3222748815165877, + "grad_norm": 0.85962610230906, + 
"learning_rate": 9.886358914914624e-06, + "loss": 2.3995, + "step": 238 + }, + { + "epoch": 0.32362897765741366, + "grad_norm": 0.8714138576280448, + "learning_rate": 9.885212549905874e-06, + "loss": 2.1309, + "step": 239 + }, + { + "epoch": 0.32498307379823965, + "grad_norm": 0.985120553099594, + "learning_rate": 9.884060498981297e-06, + "loss": 2.3078, + "step": 240 + }, + { + "epoch": 0.3263371699390657, + "grad_norm": 1.1509228640558309, + "learning_rate": 9.88290276348177e-06, + "loss": 2.762, + "step": 241 + }, + { + "epoch": 0.3276912660798917, + "grad_norm": 0.8569907806447795, + "learning_rate": 9.881739344754789e-06, + "loss": 2.4162, + "step": 242 + }, + { + "epoch": 0.3290453622207177, + "grad_norm": 0.8953655946187061, + "learning_rate": 9.880570244154455e-06, + "loss": 2.1708, + "step": 243 + }, + { + "epoch": 0.33039945836154366, + "grad_norm": 2.6172719168994782, + "learning_rate": 9.879395463041493e-06, + "loss": 2.4244, + "step": 244 + }, + { + "epoch": 0.33175355450236965, + "grad_norm": 1.1692362620244634, + "learning_rate": 9.87821500278323e-06, + "loss": 2.3573, + "step": 245 + }, + { + "epoch": 0.33310765064319564, + "grad_norm": 1.3260737783720347, + "learning_rate": 9.877028864753614e-06, + "loss": 2.2204, + "step": 246 + }, + { + "epoch": 0.3344617467840217, + "grad_norm": 1.1284130571617974, + "learning_rate": 9.87583705033319e-06, + "loss": 2.3806, + "step": 247 + }, + { + "epoch": 0.3358158429248477, + "grad_norm": 0.8740876722841778, + "learning_rate": 9.874639560909118e-06, + "loss": 1.8413, + "step": 248 + }, + { + "epoch": 0.33716993906567366, + "grad_norm": 0.8380346921284223, + "learning_rate": 9.87343639787516e-06, + "loss": 2.0977, + "step": 249 + }, + { + "epoch": 0.33852403520649965, + "grad_norm": 0.8382850128591168, + "learning_rate": 9.87222756263168e-06, + "loss": 2.1709, + "step": 250 + }, + { + "epoch": 0.33987813134732564, + "grad_norm": 1.1238927174638607, + "learning_rate": 9.871013056585646e-06, + "loss": 2.4974, + "step": 251 + }, + { + "epoch": 0.3412322274881517, + "grad_norm": 0.7904179666008488, + "learning_rate": 9.869792881150624e-06, + "loss": 2.6544, + "step": 252 + }, + { + "epoch": 0.3425863236289777, + "grad_norm": 1.5604245138186315, + "learning_rate": 9.868567037746784e-06, + "loss": 2.4196, + "step": 253 + }, + { + "epoch": 0.34394041976980366, + "grad_norm": 0.9541984067972759, + "learning_rate": 9.867335527800887e-06, + "loss": 2.437, + "step": 254 + }, + { + "epoch": 0.34529451591062965, + "grad_norm": 1.1385820836347318, + "learning_rate": 9.866098352746295e-06, + "loss": 2.6164, + "step": 255 + }, + { + "epoch": 0.34664861205145564, + "grad_norm": 0.8579010274341586, + "learning_rate": 9.864855514022955e-06, + "loss": 2.0521, + "step": 256 + }, + { + "epoch": 0.34800270819228163, + "grad_norm": 1.0456339797858754, + "learning_rate": 9.863607013077414e-06, + "loss": 1.8313, + "step": 257 + }, + { + "epoch": 0.3493568043331077, + "grad_norm": 0.9189984940983889, + "learning_rate": 9.862352851362808e-06, + "loss": 2.2923, + "step": 258 + }, + { + "epoch": 0.35071090047393366, + "grad_norm": 1.0071081750591437, + "learning_rate": 9.861093030338859e-06, + "loss": 2.5982, + "step": 259 + }, + { + "epoch": 0.35206499661475965, + "grad_norm": 0.7791457829988662, + "learning_rate": 9.859827551471877e-06, + "loss": 2.1629, + "step": 260 + }, + { + "epoch": 0.35341909275558564, + "grad_norm": 1.3508700131079088, + "learning_rate": 9.858556416234755e-06, + "loss": 2.3283, + "step": 261 + }, + { + "epoch": 0.35477318889641163, + 
"grad_norm": 1.4779336761646127, + "learning_rate": 9.857279626106975e-06, + "loss": 2.1788, + "step": 262 + }, + { + "epoch": 0.3561272850372376, + "grad_norm": 0.8287354033035986, + "learning_rate": 9.855997182574598e-06, + "loss": 2.4326, + "step": 263 + }, + { + "epoch": 0.35748138117806366, + "grad_norm": 1.2376776693679805, + "learning_rate": 9.854709087130261e-06, + "loss": 1.9501, + "step": 264 + }, + { + "epoch": 0.35883547731888965, + "grad_norm": 0.9140324089635932, + "learning_rate": 9.853415341273185e-06, + "loss": 2.1236, + "step": 265 + }, + { + "epoch": 0.36018957345971564, + "grad_norm": 0.8399357799885816, + "learning_rate": 9.852115946509163e-06, + "loss": 2.2648, + "step": 266 + }, + { + "epoch": 0.36154366960054163, + "grad_norm": 1.0405854607468001, + "learning_rate": 9.85081090435057e-06, + "loss": 2.2106, + "step": 267 + }, + { + "epoch": 0.3628977657413676, + "grad_norm": 0.8825320659362774, + "learning_rate": 9.849500216316346e-06, + "loss": 1.9139, + "step": 268 + }, + { + "epoch": 0.36425186188219366, + "grad_norm": 0.8884433268338788, + "learning_rate": 9.848183883932003e-06, + "loss": 2.3441, + "step": 269 + }, + { + "epoch": 0.36560595802301965, + "grad_norm": 0.8144408969478175, + "learning_rate": 9.846861908729628e-06, + "loss": 2.7794, + "step": 270 + }, + { + "epoch": 0.36696005416384564, + "grad_norm": 3.0643775465082093, + "learning_rate": 9.845534292247872e-06, + "loss": 2.6927, + "step": 271 + }, + { + "epoch": 0.36831415030467163, + "grad_norm": 1.0918166740808275, + "learning_rate": 9.844201036031952e-06, + "loss": 2.6845, + "step": 272 + }, + { + "epoch": 0.3696682464454976, + "grad_norm": 1.0782629441989242, + "learning_rate": 9.84286214163365e-06, + "loss": 2.1323, + "step": 273 + }, + { + "epoch": 0.3710223425863236, + "grad_norm": 0.899216159697703, + "learning_rate": 9.841517610611309e-06, + "loss": 2.5939, + "step": 274 + }, + { + "epoch": 0.37237643872714965, + "grad_norm": 0.8784697558034725, + "learning_rate": 9.840167444529834e-06, + "loss": 2.0398, + "step": 275 + }, + { + "epoch": 0.37373053486797564, + "grad_norm": 0.7893703178184768, + "learning_rate": 9.838811644960686e-06, + "loss": 2.3489, + "step": 276 + }, + { + "epoch": 0.37508463100880163, + "grad_norm": 1.5759025892565637, + "learning_rate": 9.837450213481888e-06, + "loss": 2.1618, + "step": 277 + }, + { + "epoch": 0.3764387271496276, + "grad_norm": 0.8915508527852146, + "learning_rate": 9.836083151678014e-06, + "loss": 2.2966, + "step": 278 + }, + { + "epoch": 0.3777928232904536, + "grad_norm": 0.9656457496381649, + "learning_rate": 9.834710461140191e-06, + "loss": 2.2487, + "step": 279 + }, + { + "epoch": 0.3791469194312796, + "grad_norm": 1.0512896665744822, + "learning_rate": 9.833332143466099e-06, + "loss": 2.4065, + "step": 280 + }, + { + "epoch": 0.38050101557210564, + "grad_norm": 1.0658387523528818, + "learning_rate": 9.831948200259966e-06, + "loss": 2.4469, + "step": 281 + }, + { + "epoch": 0.3818551117129316, + "grad_norm": 1.117757922229749, + "learning_rate": 9.830558633132568e-06, + "loss": 2.2469, + "step": 282 + }, + { + "epoch": 0.3832092078537576, + "grad_norm": 0.8846882799705557, + "learning_rate": 9.82916344370123e-06, + "loss": 1.8672, + "step": 283 + }, + { + "epoch": 0.3845633039945836, + "grad_norm": 0.8829678982284991, + "learning_rate": 9.827762633589813e-06, + "loss": 2.3709, + "step": 284 + }, + { + "epoch": 0.3859174001354096, + "grad_norm": 1.5559611510853741, + "learning_rate": 9.826356204428726e-06, + "loss": 2.531, + "step": 285 + }, + { + 
"epoch": 0.38727149627623564, + "grad_norm": 0.8832988192797496, + "learning_rate": 9.82494415785492e-06, + "loss": 2.0908, + "step": 286 + }, + { + "epoch": 0.3886255924170616, + "grad_norm": 0.893313058823839, + "learning_rate": 9.82352649551188e-06, + "loss": 2.2141, + "step": 287 + }, + { + "epoch": 0.3899796885578876, + "grad_norm": 1.915840114647375, + "learning_rate": 9.822103219049625e-06, + "loss": 2.3759, + "step": 288 + }, + { + "epoch": 0.3913337846987136, + "grad_norm": 1.091811159406276, + "learning_rate": 9.820674330124716e-06, + "loss": 1.949, + "step": 289 + }, + { + "epoch": 0.3926878808395396, + "grad_norm": 0.9101585142251976, + "learning_rate": 9.819239830400238e-06, + "loss": 2.2837, + "step": 290 + }, + { + "epoch": 0.3940419769803656, + "grad_norm": 0.8669554630795423, + "learning_rate": 9.81779972154581e-06, + "loss": 2.3568, + "step": 291 + }, + { + "epoch": 0.3953960731211916, + "grad_norm": 1.0812216653348674, + "learning_rate": 9.816354005237583e-06, + "loss": 2.5594, + "step": 292 + }, + { + "epoch": 0.3967501692620176, + "grad_norm": 1.0135729926732555, + "learning_rate": 9.814902683158227e-06, + "loss": 2.2677, + "step": 293 + }, + { + "epoch": 0.3981042654028436, + "grad_norm": 1.1354280196245004, + "learning_rate": 9.813445756996946e-06, + "loss": 2.1235, + "step": 294 + }, + { + "epoch": 0.3994583615436696, + "grad_norm": 0.8872371772513353, + "learning_rate": 9.811983228449457e-06, + "loss": 2.6214, + "step": 295 + }, + { + "epoch": 0.4008124576844956, + "grad_norm": 1.198158875138794, + "learning_rate": 9.810515099218004e-06, + "loss": 2.5482, + "step": 296 + }, + { + "epoch": 0.40216655382532157, + "grad_norm": 1.122073024930223, + "learning_rate": 9.809041371011347e-06, + "loss": 2.3414, + "step": 297 + }, + { + "epoch": 0.4035206499661476, + "grad_norm": 0.7450105063153143, + "learning_rate": 9.807562045544764e-06, + "loss": 2.5729, + "step": 298 + }, + { + "epoch": 0.4048747461069736, + "grad_norm": 1.096232419286508, + "learning_rate": 9.806077124540045e-06, + "loss": 2.5012, + "step": 299 + }, + { + "epoch": 0.4062288422477996, + "grad_norm": 1.0045118062203406, + "learning_rate": 9.804586609725499e-06, + "loss": 2.0684, + "step": 300 + }, + { + "epoch": 0.4075829383886256, + "grad_norm": 1.0368766866449026, + "learning_rate": 9.803090502835938e-06, + "loss": 2.1287, + "step": 301 + }, + { + "epoch": 0.40893703452945157, + "grad_norm": 1.0208588144634514, + "learning_rate": 9.801588805612685e-06, + "loss": 2.3341, + "step": 302 + }, + { + "epoch": 0.4102911306702776, + "grad_norm": 1.0454209454651813, + "learning_rate": 9.800081519803575e-06, + "loss": 2.088, + "step": 303 + }, + { + "epoch": 0.4116452268111036, + "grad_norm": 1.0546734132886375, + "learning_rate": 9.798568647162939e-06, + "loss": 2.3576, + "step": 304 + }, + { + "epoch": 0.4129993229519296, + "grad_norm": 1.1121493752708365, + "learning_rate": 9.797050189451615e-06, + "loss": 2.1073, + "step": 305 + }, + { + "epoch": 0.4143534190927556, + "grad_norm": 1.1189378911389287, + "learning_rate": 9.795526148436945e-06, + "loss": 2.308, + "step": 306 + }, + { + "epoch": 0.41570751523358157, + "grad_norm": 0.9899429575476786, + "learning_rate": 9.793996525892762e-06, + "loss": 2.2555, + "step": 307 + }, + { + "epoch": 0.41706161137440756, + "grad_norm": 1.0479984863056457, + "learning_rate": 9.7924613235994e-06, + "loss": 2.3882, + "step": 308 + }, + { + "epoch": 0.4184157075152336, + "grad_norm": 0.8437095271965467, + "learning_rate": 9.790920543343686e-06, + "loss": 2.8099, + "step": 
309 + }, + { + "epoch": 0.4197698036560596, + "grad_norm": 1.1267645343143333, + "learning_rate": 9.78937418691894e-06, + "loss": 2.377, + "step": 310 + }, + { + "epoch": 0.4211238997968856, + "grad_norm": 0.954265542677354, + "learning_rate": 9.787822256124972e-06, + "loss": 2.2119, + "step": 311 + }, + { + "epoch": 0.42247799593771157, + "grad_norm": 1.0897928699498936, + "learning_rate": 9.78626475276808e-06, + "loss": 2.5392, + "step": 312 + }, + { + "epoch": 0.42383209207853756, + "grad_norm": 0.8557954442168854, + "learning_rate": 9.784701678661045e-06, + "loss": 2.0243, + "step": 313 + }, + { + "epoch": 0.42518618821936355, + "grad_norm": 1.007998713387866, + "learning_rate": 9.783133035623136e-06, + "loss": 2.0593, + "step": 314 + }, + { + "epoch": 0.4265402843601896, + "grad_norm": 0.8245127331943697, + "learning_rate": 9.781558825480104e-06, + "loss": 1.9544, + "step": 315 + }, + { + "epoch": 0.4278943805010156, + "grad_norm": 1.1343475642376186, + "learning_rate": 9.779979050064174e-06, + "loss": 2.1917, + "step": 316 + }, + { + "epoch": 0.42924847664184157, + "grad_norm": 1.1336908348453985, + "learning_rate": 9.778393711214054e-06, + "loss": 2.3615, + "step": 317 + }, + { + "epoch": 0.43060257278266756, + "grad_norm": 1.0074004622651764, + "learning_rate": 9.776802810774924e-06, + "loss": 2.4642, + "step": 318 + }, + { + "epoch": 0.43195666892349355, + "grad_norm": 0.869836917179775, + "learning_rate": 9.77520635059844e-06, + "loss": 2.2773, + "step": 319 + }, + { + "epoch": 0.4333107650643196, + "grad_norm": 0.8822131547317252, + "learning_rate": 9.77360433254273e-06, + "loss": 2.5491, + "step": 320 + }, + { + "epoch": 0.4346648612051456, + "grad_norm": 0.9871053514468092, + "learning_rate": 9.771996758472381e-06, + "loss": 2.5086, + "step": 321 + }, + { + "epoch": 0.43601895734597157, + "grad_norm": 0.8810949134669234, + "learning_rate": 9.770383630258463e-06, + "loss": 2.4492, + "step": 322 + }, + { + "epoch": 0.43737305348679756, + "grad_norm": 0.9634447042117443, + "learning_rate": 9.768764949778495e-06, + "loss": 2.4698, + "step": 323 + }, + { + "epoch": 0.43872714962762355, + "grad_norm": 0.9578841465826307, + "learning_rate": 9.767140718916467e-06, + "loss": 2.8468, + "step": 324 + }, + { + "epoch": 0.44008124576844954, + "grad_norm": 1.045773660121023, + "learning_rate": 9.765510939562827e-06, + "loss": 2.4331, + "step": 325 + }, + { + "epoch": 0.4414353419092756, + "grad_norm": 1.5236244191611785, + "learning_rate": 9.763875613614482e-06, + "loss": 2.4025, + "step": 326 + }, + { + "epoch": 0.44278943805010157, + "grad_norm": 0.9193628457370401, + "learning_rate": 9.762234742974793e-06, + "loss": 2.2136, + "step": 327 + }, + { + "epoch": 0.44414353419092756, + "grad_norm": 0.8949925268143644, + "learning_rate": 9.76058832955357e-06, + "loss": 2.4088, + "step": 328 + }, + { + "epoch": 0.44549763033175355, + "grad_norm": 2.0556356286018413, + "learning_rate": 9.758936375267087e-06, + "loss": 2.1351, + "step": 329 + }, + { + "epoch": 0.44685172647257954, + "grad_norm": 1.0444826822145854, + "learning_rate": 9.757278882038056e-06, + "loss": 2.7799, + "step": 330 + }, + { + "epoch": 0.4482058226134055, + "grad_norm": 1.0824437177931403, + "learning_rate": 9.755615851795639e-06, + "loss": 1.8668, + "step": 331 + }, + { + "epoch": 0.44955991875423157, + "grad_norm": 1.1111629995250478, + "learning_rate": 9.753947286475442e-06, + "loss": 2.754, + "step": 332 + }, + { + "epoch": 0.45091401489505756, + "grad_norm": 0.9573737568401282, + "learning_rate": 9.752273188019514e-06, + 
"loss": 2.5329, + "step": 333 + }, + { + "epoch": 0.45226811103588355, + "grad_norm": 1.0357238977124108, + "learning_rate": 9.750593558376347e-06, + "loss": 2.2007, + "step": 334 + }, + { + "epoch": 0.45362220717670954, + "grad_norm": 0.9095487712099384, + "learning_rate": 9.748908399500863e-06, + "loss": 1.8825, + "step": 335 + }, + { + "epoch": 0.4549763033175355, + "grad_norm": 1.7445295348550816, + "learning_rate": 9.747217713354428e-06, + "loss": 2.5305, + "step": 336 + }, + { + "epoch": 0.45633039945836157, + "grad_norm": 0.9070558170854458, + "learning_rate": 9.745521501904835e-06, + "loss": 2.1951, + "step": 337 + }, + { + "epoch": 0.45768449559918756, + "grad_norm": 0.8995516521874707, + "learning_rate": 9.743819767126312e-06, + "loss": 2.2137, + "step": 338 + }, + { + "epoch": 0.45903859174001355, + "grad_norm": 1.9426471289696312, + "learning_rate": 9.742112510999516e-06, + "loss": 2.3716, + "step": 339 + }, + { + "epoch": 0.46039268788083954, + "grad_norm": 0.9725576998378328, + "learning_rate": 9.740399735511524e-06, + "loss": 2.6405, + "step": 340 + }, + { + "epoch": 0.4617467840216655, + "grad_norm": 0.8305802783581133, + "learning_rate": 9.738681442655842e-06, + "loss": 2.2231, + "step": 341 + }, + { + "epoch": 0.4631008801624915, + "grad_norm": 0.9734526439933455, + "learning_rate": 9.736957634432398e-06, + "loss": 2.3041, + "step": 342 + }, + { + "epoch": 0.46445497630331756, + "grad_norm": 0.824808147309629, + "learning_rate": 9.73522831284754e-06, + "loss": 2.3024, + "step": 343 + }, + { + "epoch": 0.46580907244414355, + "grad_norm": 0.9599600802177057, + "learning_rate": 9.733493479914031e-06, + "loss": 2.3563, + "step": 344 + }, + { + "epoch": 0.46716316858496953, + "grad_norm": 0.9340063834780592, + "learning_rate": 9.731753137651047e-06, + "loss": 2.1237, + "step": 345 + }, + { + "epoch": 0.4685172647257955, + "grad_norm": 1.562533249030556, + "learning_rate": 9.730007288084178e-06, + "loss": 2.3575, + "step": 346 + }, + { + "epoch": 0.4698713608666215, + "grad_norm": 0.9490993069469829, + "learning_rate": 9.728255933245428e-06, + "loss": 2.8822, + "step": 347 + }, + { + "epoch": 0.4712254570074475, + "grad_norm": 0.9566724872274933, + "learning_rate": 9.726499075173201e-06, + "loss": 2.3402, + "step": 348 + }, + { + "epoch": 0.47257955314827355, + "grad_norm": 1.008513385617003, + "learning_rate": 9.724736715912313e-06, + "loss": 2.1406, + "step": 349 + }, + { + "epoch": 0.47393364928909953, + "grad_norm": 1.1766610712644368, + "learning_rate": 9.72296885751398e-06, + "loss": 2.1934, + "step": 350 + }, + { + "epoch": 0.4752877454299255, + "grad_norm": 0.9325244223791722, + "learning_rate": 9.721195502035817e-06, + "loss": 1.9835, + "step": 351 + }, + { + "epoch": 0.4766418415707515, + "grad_norm": 0.837427966240756, + "learning_rate": 9.719416651541839e-06, + "loss": 2.481, + "step": 352 + }, + { + "epoch": 0.4779959377115775, + "grad_norm": 1.02007044942659, + "learning_rate": 9.717632308102455e-06, + "loss": 2.7148, + "step": 353 + }, + { + "epoch": 0.47935003385240355, + "grad_norm": 0.959116878107154, + "learning_rate": 9.715842473794472e-06, + "loss": 2.4738, + "step": 354 + }, + { + "epoch": 0.48070412999322953, + "grad_norm": 1.0102624905029922, + "learning_rate": 9.714047150701082e-06, + "loss": 2.0945, + "step": 355 + }, + { + "epoch": 0.4820582261340555, + "grad_norm": 2.0984222479912438, + "learning_rate": 9.712246340911866e-06, + "loss": 2.1697, + "step": 356 + }, + { + "epoch": 0.4834123222748815, + "grad_norm": 0.917396475775467, + "learning_rate": 
9.710440046522797e-06, + "loss": 2.2475, + "step": 357 + }, + { + "epoch": 0.4847664184157075, + "grad_norm": 1.005361407668732, + "learning_rate": 9.708628269636224e-06, + "loss": 2.2307, + "step": 358 + }, + { + "epoch": 0.4861205145565335, + "grad_norm": 1.0309046462532379, + "learning_rate": 9.706811012360882e-06, + "loss": 2.4654, + "step": 359 + }, + { + "epoch": 0.48747461069735953, + "grad_norm": 3.0659420771722834, + "learning_rate": 9.704988276811883e-06, + "loss": 2.7097, + "step": 360 + }, + { + "epoch": 0.4888287068381855, + "grad_norm": 1.0772251854315056, + "learning_rate": 9.703160065110716e-06, + "loss": 2.3728, + "step": 361 + }, + { + "epoch": 0.4901828029790115, + "grad_norm": 1.1656105776873809, + "learning_rate": 9.701326379385238e-06, + "loss": 2.6172, + "step": 362 + }, + { + "epoch": 0.4915368991198375, + "grad_norm": 1.0293153712278362, + "learning_rate": 9.699487221769687e-06, + "loss": 2.427, + "step": 363 + }, + { + "epoch": 0.4928909952606635, + "grad_norm": 1.1554858971025437, + "learning_rate": 9.697642594404666e-06, + "loss": 2.3854, + "step": 364 + }, + { + "epoch": 0.4942450914014895, + "grad_norm": 1.1552799937404215, + "learning_rate": 9.69579249943714e-06, + "loss": 2.2642, + "step": 365 + }, + { + "epoch": 0.4955991875423155, + "grad_norm": 1.3024770839893858, + "learning_rate": 9.693936939020441e-06, + "loss": 2.7555, + "step": 366 + }, + { + "epoch": 0.4969532836831415, + "grad_norm": 0.9139562471712885, + "learning_rate": 9.692075915314265e-06, + "loss": 1.9849, + "step": 367 + }, + { + "epoch": 0.4983073798239675, + "grad_norm": 1.1108979731718802, + "learning_rate": 9.69020943048466e-06, + "loss": 2.7987, + "step": 368 + }, + { + "epoch": 0.4996614759647935, + "grad_norm": 1.043554360812148, + "learning_rate": 9.688337486704038e-06, + "loss": 2.379, + "step": 369 + }, + { + "epoch": 0.5010155721056195, + "grad_norm": 0.8482819857067955, + "learning_rate": 9.686460086151159e-06, + "loss": 2.3031, + "step": 370 + }, + { + "epoch": 0.5023696682464455, + "grad_norm": 0.882893031313843, + "learning_rate": 9.684577231011134e-06, + "loss": 1.9398, + "step": 371 + }, + { + "epoch": 0.5037237643872715, + "grad_norm": 0.8900569461037698, + "learning_rate": 9.68268892347543e-06, + "loss": 2.3579, + "step": 372 + }, + { + "epoch": 0.5050778605280974, + "grad_norm": 1.027440419577199, + "learning_rate": 9.680795165741849e-06, + "loss": 1.8953, + "step": 373 + }, + { + "epoch": 0.5064319566689235, + "grad_norm": 1.2020524441224305, + "learning_rate": 9.678895960014545e-06, + "loss": 2.6735, + "step": 374 + }, + { + "epoch": 0.5077860528097495, + "grad_norm": 1.6359343554936903, + "learning_rate": 9.676991308504012e-06, + "loss": 2.5313, + "step": 375 + }, + { + "epoch": 0.5091401489505755, + "grad_norm": 0.9318914822552815, + "learning_rate": 9.675081213427076e-06, + "loss": 2.7319, + "step": 376 + }, + { + "epoch": 0.5104942450914015, + "grad_norm": 1.0629414040228036, + "learning_rate": 9.673165677006906e-06, + "loss": 2.5041, + "step": 377 + }, + { + "epoch": 0.5118483412322274, + "grad_norm": 0.9015543787336042, + "learning_rate": 9.671244701472999e-06, + "loss": 2.2052, + "step": 378 + }, + { + "epoch": 0.5132024373730535, + "grad_norm": 0.9603399577043747, + "learning_rate": 9.669318289061191e-06, + "loss": 2.5069, + "step": 379 + }, + { + "epoch": 0.5145565335138795, + "grad_norm": 0.8421778244290412, + "learning_rate": 9.667386442013634e-06, + "loss": 2.6313, + "step": 380 + }, + { + "epoch": 0.5159106296547055, + "grad_norm": 0.8356138567382542, + 
"learning_rate": 9.665449162578814e-06, + "loss": 2.0145, + "step": 381 + }, + { + "epoch": 0.5172647257955315, + "grad_norm": 0.9669755721462949, + "learning_rate": 9.663506453011538e-06, + "loss": 2.2227, + "step": 382 + }, + { + "epoch": 0.5186188219363574, + "grad_norm": 1.163959225455655, + "learning_rate": 9.66155831557293e-06, + "loss": 2.5775, + "step": 383 + }, + { + "epoch": 0.5199729180771835, + "grad_norm": 0.9362192144637581, + "learning_rate": 9.659604752530434e-06, + "loss": 2.2014, + "step": 384 + }, + { + "epoch": 0.5213270142180095, + "grad_norm": 0.9892445662855336, + "learning_rate": 9.657645766157813e-06, + "loss": 2.4195, + "step": 385 + }, + { + "epoch": 0.5226811103588355, + "grad_norm": 0.8722826858897498, + "learning_rate": 9.655681358735134e-06, + "loss": 2.6966, + "step": 386 + }, + { + "epoch": 0.5240352064996615, + "grad_norm": 1.0196349369430946, + "learning_rate": 9.653711532548778e-06, + "loss": 2.4237, + "step": 387 + }, + { + "epoch": 0.5253893026404874, + "grad_norm": 1.0011773469402157, + "learning_rate": 9.651736289891434e-06, + "loss": 2.3765, + "step": 388 + }, + { + "epoch": 0.5267433987813135, + "grad_norm": 0.9843650150083161, + "learning_rate": 9.649755633062092e-06, + "loss": 2.4698, + "step": 389 + }, + { + "epoch": 0.5280974949221394, + "grad_norm": 1.2199752234970775, + "learning_rate": 9.647769564366048e-06, + "loss": 2.2025, + "step": 390 + }, + { + "epoch": 0.5294515910629655, + "grad_norm": 1.048526738243433, + "learning_rate": 9.645778086114892e-06, + "loss": 2.5315, + "step": 391 + }, + { + "epoch": 0.5308056872037915, + "grad_norm": 1.0079058862169128, + "learning_rate": 9.643781200626512e-06, + "loss": 2.2688, + "step": 392 + }, + { + "epoch": 0.5321597833446174, + "grad_norm": 0.9175841000628026, + "learning_rate": 9.641778910225093e-06, + "loss": 2.4924, + "step": 393 + }, + { + "epoch": 0.5335138794854435, + "grad_norm": 0.9394126148321758, + "learning_rate": 9.639771217241104e-06, + "loss": 2.6604, + "step": 394 + }, + { + "epoch": 0.5348679756262694, + "grad_norm": 1.8536643574976448, + "learning_rate": 9.637758124011307e-06, + "loss": 2.3891, + "step": 395 + }, + { + "epoch": 0.5362220717670955, + "grad_norm": 1.353118661391877, + "learning_rate": 9.63573963287875e-06, + "loss": 2.2486, + "step": 396 + }, + { + "epoch": 0.5375761679079215, + "grad_norm": 0.9114437873284958, + "learning_rate": 9.633715746192762e-06, + "loss": 2.9009, + "step": 397 + }, + { + "epoch": 0.5389302640487474, + "grad_norm": 0.9178272536878976, + "learning_rate": 9.631686466308947e-06, + "loss": 2.6545, + "step": 398 + }, + { + "epoch": 0.5402843601895735, + "grad_norm": 0.8349824532422451, + "learning_rate": 9.629651795589197e-06, + "loss": 2.3238, + "step": 399 + }, + { + "epoch": 0.5416384563303994, + "grad_norm": 0.8002054931156397, + "learning_rate": 9.627611736401668e-06, + "loss": 2.1503, + "step": 400 + }, + { + "epoch": 0.5429925524712255, + "grad_norm": 1.014043339956385, + "learning_rate": 9.625566291120794e-06, + "loss": 2.3366, + "step": 401 + }, + { + "epoch": 0.5443466486120515, + "grad_norm": 0.9753524140226457, + "learning_rate": 9.623515462127276e-06, + "loss": 2.4777, + "step": 402 + }, + { + "epoch": 0.5457007447528774, + "grad_norm": 0.9904758899407581, + "learning_rate": 9.621459251808078e-06, + "loss": 2.5355, + "step": 403 + }, + { + "epoch": 0.5470548408937035, + "grad_norm": 0.9176434471710123, + "learning_rate": 9.619397662556434e-06, + "loss": 2.1775, + "step": 404 + }, + { + "epoch": 0.5484089370345294, + "grad_norm": 
0.9994849139704428, + "learning_rate": 9.617330696771834e-06, + "loss": 2.3618, + "step": 405 + }, + { + "epoch": 0.5497630331753555, + "grad_norm": 0.9718579619606619, + "learning_rate": 9.615258356860027e-06, + "loss": 2.065, + "step": 406 + }, + { + "epoch": 0.5511171293161814, + "grad_norm": 0.8890137365695405, + "learning_rate": 9.613180645233014e-06, + "loss": 2.153, + "step": 407 + }, + { + "epoch": 0.5524712254570074, + "grad_norm": 1.001837019128747, + "learning_rate": 9.611097564309054e-06, + "loss": 2.4168, + "step": 408 + }, + { + "epoch": 0.5538253215978335, + "grad_norm": 1.205118639472601, + "learning_rate": 9.609009116512648e-06, + "loss": 2.5229, + "step": 409 + }, + { + "epoch": 0.5551794177386594, + "grad_norm": 0.9757578479246173, + "learning_rate": 9.60691530427455e-06, + "loss": 2.1174, + "step": 410 + }, + { + "epoch": 0.5565335138794855, + "grad_norm": 0.9985946290929901, + "learning_rate": 9.60481613003176e-06, + "loss": 1.9395, + "step": 411 + }, + { + "epoch": 0.5578876100203114, + "grad_norm": 0.9058862928790223, + "learning_rate": 9.602711596227507e-06, + "loss": 2.1988, + "step": 412 + }, + { + "epoch": 0.5592417061611374, + "grad_norm": 0.9620888369560685, + "learning_rate": 9.600601705311267e-06, + "loss": 2.3826, + "step": 413 + }, + { + "epoch": 0.5605958023019635, + "grad_norm": 1.0478032599035778, + "learning_rate": 9.598486459738751e-06, + "loss": 2.4528, + "step": 414 + }, + { + "epoch": 0.5619498984427894, + "grad_norm": 0.8838781940692847, + "learning_rate": 9.5963658619719e-06, + "loss": 2.2238, + "step": 415 + }, + { + "epoch": 0.5633039945836155, + "grad_norm": 1.044653451464992, + "learning_rate": 9.594239914478886e-06, + "loss": 2.8379, + "step": 416 + }, + { + "epoch": 0.5646580907244414, + "grad_norm": 0.9143098376517602, + "learning_rate": 9.592108619734107e-06, + "loss": 2.3621, + "step": 417 + }, + { + "epoch": 0.5660121868652674, + "grad_norm": 1.3056702719267006, + "learning_rate": 9.58997198021818e-06, + "loss": 2.3495, + "step": 418 + }, + { + "epoch": 0.5673662830060935, + "grad_norm": 1.2114169289052528, + "learning_rate": 9.587829998417953e-06, + "loss": 2.6471, + "step": 419 + }, + { + "epoch": 0.5687203791469194, + "grad_norm": 1.046324944660913, + "learning_rate": 9.58568267682648e-06, + "loss": 2.402, + "step": 420 + }, + { + "epoch": 0.5700744752877455, + "grad_norm": 0.9349583810327691, + "learning_rate": 9.58353001794304e-06, + "loss": 2.2225, + "step": 421 + }, + { + "epoch": 0.5714285714285714, + "grad_norm": 1.481772765240176, + "learning_rate": 9.581372024273121e-06, + "loss": 1.915, + "step": 422 + }, + { + "epoch": 0.5727826675693974, + "grad_norm": 0.928476736332317, + "learning_rate": 9.579208698328419e-06, + "loss": 1.8008, + "step": 423 + }, + { + "epoch": 0.5741367637102234, + "grad_norm": 0.9260270264242221, + "learning_rate": 9.577040042626832e-06, + "loss": 2.417, + "step": 424 + }, + { + "epoch": 0.5754908598510494, + "grad_norm": 1.1064706208462298, + "learning_rate": 9.574866059692471e-06, + "loss": 2.5265, + "step": 425 + }, + { + "epoch": 0.5768449559918755, + "grad_norm": 2.139248004116943, + "learning_rate": 9.57268675205564e-06, + "loss": 2.1862, + "step": 426 + }, + { + "epoch": 0.5781990521327014, + "grad_norm": 1.2117707663910726, + "learning_rate": 9.570502122252844e-06, + "loss": 2.8615, + "step": 427 + }, + { + "epoch": 0.5795531482735274, + "grad_norm": 0.8678663266783263, + "learning_rate": 9.568312172826779e-06, + "loss": 2.2196, + "step": 428 + }, + { + "epoch": 0.5809072444143534, + 
"grad_norm": 1.183740118318461, + "learning_rate": 9.566116906326336e-06, + "loss": 2.7205, + "step": 429 + }, + { + "epoch": 0.5822613405551794, + "grad_norm": 0.9774576381702014, + "learning_rate": 9.563916325306595e-06, + "loss": 2.2203, + "step": 430 + }, + { + "epoch": 0.5836154366960055, + "grad_norm": 0.9824933411377383, + "learning_rate": 9.561710432328817e-06, + "loss": 2.7149, + "step": 431 + }, + { + "epoch": 0.5849695328368314, + "grad_norm": 1.8285370131236038, + "learning_rate": 9.55949922996045e-06, + "loss": 2.1549, + "step": 432 + }, + { + "epoch": 0.5863236289776574, + "grad_norm": 0.92698096294728, + "learning_rate": 9.55728272077512e-06, + "loss": 2.0743, + "step": 433 + }, + { + "epoch": 0.5876777251184834, + "grad_norm": 1.0344108156736416, + "learning_rate": 9.555060907352632e-06, + "loss": 2.3762, + "step": 434 + }, + { + "epoch": 0.5890318212593094, + "grad_norm": 1.0045554696436734, + "learning_rate": 9.552833792278957e-06, + "loss": 2.1954, + "step": 435 + }, + { + "epoch": 0.5903859174001355, + "grad_norm": 0.7967159103822394, + "learning_rate": 9.550601378146246e-06, + "loss": 2.0138, + "step": 436 + }, + { + "epoch": 0.5917400135409614, + "grad_norm": 0.8111896941287193, + "learning_rate": 9.54836366755281e-06, + "loss": 2.3701, + "step": 437 + }, + { + "epoch": 0.5930941096817874, + "grad_norm": 0.9726524616144652, + "learning_rate": 9.546120663103134e-06, + "loss": 1.9217, + "step": 438 + }, + { + "epoch": 0.5944482058226134, + "grad_norm": 1.0256397319731523, + "learning_rate": 9.543872367407854e-06, + "loss": 2.0282, + "step": 439 + }, + { + "epoch": 0.5958023019634394, + "grad_norm": 1.0079882867447063, + "learning_rate": 9.54161878308377e-06, + "loss": 2.3361, + "step": 440 + }, + { + "epoch": 0.5971563981042654, + "grad_norm": 0.9800767836635148, + "learning_rate": 9.539359912753839e-06, + "loss": 2.6659, + "step": 441 + }, + { + "epoch": 0.5985104942450914, + "grad_norm": 1.0649583722690574, + "learning_rate": 9.537095759047163e-06, + "loss": 2.4143, + "step": 442 + }, + { + "epoch": 0.5998645903859174, + "grad_norm": 1.174278203502222, + "learning_rate": 9.534826324599002e-06, + "loss": 2.22, + "step": 443 + }, + { + "epoch": 0.6012186865267434, + "grad_norm": 1.3010142344912385, + "learning_rate": 9.53255161205076e-06, + "loss": 2.1634, + "step": 444 + }, + { + "epoch": 0.6025727826675694, + "grad_norm": 0.8409212163726476, + "learning_rate": 9.530271624049979e-06, + "loss": 1.8782, + "step": 445 + }, + { + "epoch": 0.6039268788083954, + "grad_norm": 0.8027451978851314, + "learning_rate": 9.527986363250348e-06, + "loss": 2.203, + "step": 446 + }, + { + "epoch": 0.6052809749492214, + "grad_norm": 0.9039555297703499, + "learning_rate": 9.525695832311688e-06, + "loss": 2.3687, + "step": 447 + }, + { + "epoch": 0.6066350710900474, + "grad_norm": 0.8912812069256285, + "learning_rate": 9.523400033899957e-06, + "loss": 2.2258, + "step": 448 + }, + { + "epoch": 0.6079891672308734, + "grad_norm": 0.8488642026117442, + "learning_rate": 9.52109897068724e-06, + "loss": 2.4719, + "step": 449 + }, + { + "epoch": 0.6093432633716994, + "grad_norm": 0.9189612986590668, + "learning_rate": 9.518792645351757e-06, + "loss": 2.1323, + "step": 450 + }, + { + "epoch": 0.6106973595125254, + "grad_norm": 1.0241173909729102, + "learning_rate": 9.516481060577847e-06, + "loss": 2.3125, + "step": 451 + }, + { + "epoch": 0.6120514556533514, + "grad_norm": 0.9700124960446453, + "learning_rate": 9.514164219055969e-06, + "loss": 2.1262, + "step": 452 + }, + { + "epoch": 
0.6134055517941774, + "grad_norm": 0.9865209216124129, + "learning_rate": 9.511842123482703e-06, + "loss": 2.0346, + "step": 453 + }, + { + "epoch": 0.6147596479350034, + "grad_norm": 0.8339628050854324, + "learning_rate": 9.509514776560747e-06, + "loss": 2.2793, + "step": 454 + }, + { + "epoch": 0.6161137440758294, + "grad_norm": 1.2124630572926367, + "learning_rate": 9.507182180998906e-06, + "loss": 2.1059, + "step": 455 + }, + { + "epoch": 0.6174678402166554, + "grad_norm": 1.1523483755668846, + "learning_rate": 9.504844339512096e-06, + "loss": 2.1094, + "step": 456 + }, + { + "epoch": 0.6188219363574814, + "grad_norm": 0.9250806368586671, + "learning_rate": 9.50250125482134e-06, + "loss": 2.6985, + "step": 457 + }, + { + "epoch": 0.6201760324983073, + "grad_norm": 1.2028362769677976, + "learning_rate": 9.500152929653764e-06, + "loss": 2.3153, + "step": 458 + }, + { + "epoch": 0.6215301286391334, + "grad_norm": 1.071969908607831, + "learning_rate": 9.497799366742586e-06, + "loss": 2.0759, + "step": 459 + }, + { + "epoch": 0.6228842247799594, + "grad_norm": 1.3535021074481768, + "learning_rate": 9.49544056882713e-06, + "loss": 2.2538, + "step": 460 + }, + { + "epoch": 0.6242383209207854, + "grad_norm": 1.1103088277032604, + "learning_rate": 9.49307653865281e-06, + "loss": 2.0217, + "step": 461 + }, + { + "epoch": 0.6255924170616114, + "grad_norm": 0.9163278193375946, + "learning_rate": 9.490707278971127e-06, + "loss": 2.2543, + "step": 462 + }, + { + "epoch": 0.6269465132024373, + "grad_norm": 0.953261634167317, + "learning_rate": 9.488332792539672e-06, + "loss": 2.6539, + "step": 463 + }, + { + "epoch": 0.6283006093432634, + "grad_norm": 1.029379230332492, + "learning_rate": 9.485953082122116e-06, + "loss": 2.3507, + "step": 464 + }, + { + "epoch": 0.6296547054840894, + "grad_norm": 0.9618296595544104, + "learning_rate": 9.483568150488215e-06, + "loss": 2.4448, + "step": 465 + }, + { + "epoch": 0.6310088016249153, + "grad_norm": 0.9594228312002971, + "learning_rate": 9.481178000413796e-06, + "loss": 2.6108, + "step": 466 + }, + { + "epoch": 0.6323628977657414, + "grad_norm": 0.9624426030456921, + "learning_rate": 9.478782634680765e-06, + "loss": 2.3803, + "step": 467 + }, + { + "epoch": 0.6337169939065673, + "grad_norm": 0.9808684308680445, + "learning_rate": 9.476382056077097e-06, + "loss": 2.5205, + "step": 468 + }, + { + "epoch": 0.6350710900473934, + "grad_norm": 1.0917634456153018, + "learning_rate": 9.473976267396831e-06, + "loss": 2.008, + "step": 469 + }, + { + "epoch": 0.6364251861882194, + "grad_norm": 1.85759777161883, + "learning_rate": 9.471565271440075e-06, + "loss": 1.9461, + "step": 470 + }, + { + "epoch": 0.6377792823290453, + "grad_norm": 0.9263896064370456, + "learning_rate": 9.469149071012996e-06, + "loss": 2.1459, + "step": 471 + }, + { + "epoch": 0.6391333784698714, + "grad_norm": 1.2831155066680264, + "learning_rate": 9.466727668927817e-06, + "loss": 2.5968, + "step": 472 + }, + { + "epoch": 0.6404874746106973, + "grad_norm": 2.5926011027880405, + "learning_rate": 9.464301068002815e-06, + "loss": 2.8532, + "step": 473 + }, + { + "epoch": 0.6418415707515234, + "grad_norm": 1.3517479249404691, + "learning_rate": 9.461869271062322e-06, + "loss": 2.5085, + "step": 474 + }, + { + "epoch": 0.6431956668923493, + "grad_norm": 1.0653079693612253, + "learning_rate": 9.459432280936714e-06, + "loss": 1.926, + "step": 475 + }, + { + "epoch": 0.6445497630331753, + "grad_norm": 1.0222358313347504, + "learning_rate": 9.456990100462411e-06, + "loss": 2.0835, + "step": 476 + }, + { 
+ "epoch": 0.6459038591740014, + "grad_norm": 0.9472201990170999, + "learning_rate": 9.454542732481876e-06, + "loss": 2.5298, + "step": 477 + }, + { + "epoch": 0.6472579553148273, + "grad_norm": 1.05236931626042, + "learning_rate": 9.452090179843609e-06, + "loss": 2.5688, + "step": 478 + }, + { + "epoch": 0.6486120514556534, + "grad_norm": 0.9205333314410238, + "learning_rate": 9.449632445402146e-06, + "loss": 2.2376, + "step": 479 + }, + { + "epoch": 0.6499661475964793, + "grad_norm": 0.936047746739389, + "learning_rate": 9.44716953201805e-06, + "loss": 2.2657, + "step": 480 + }, + { + "epoch": 0.6513202437373053, + "grad_norm": 0.984012799628733, + "learning_rate": 9.444701442557917e-06, + "loss": 2.2747, + "step": 481 + }, + { + "epoch": 0.6526743398781314, + "grad_norm": 1.1605171743997218, + "learning_rate": 9.442228179894362e-06, + "loss": 2.5819, + "step": 482 + }, + { + "epoch": 0.6540284360189573, + "grad_norm": 1.0367078942789454, + "learning_rate": 9.439749746906027e-06, + "loss": 2.3326, + "step": 483 + }, + { + "epoch": 0.6553825321597834, + "grad_norm": 0.8511526169640604, + "learning_rate": 9.437266146477567e-06, + "loss": 2.3916, + "step": 484 + }, + { + "epoch": 0.6567366283006093, + "grad_norm": 0.9546785433848596, + "learning_rate": 9.434777381499654e-06, + "loss": 2.2932, + "step": 485 + }, + { + "epoch": 0.6580907244414353, + "grad_norm": 0.897715373597665, + "learning_rate": 9.43228345486897e-06, + "loss": 2.2329, + "step": 486 + }, + { + "epoch": 0.6594448205822614, + "grad_norm": 1.1576802918228548, + "learning_rate": 9.429784369488205e-06, + "loss": 2.5906, + "step": 487 + }, + { + "epoch": 0.6607989167230873, + "grad_norm": 1.2669978365090186, + "learning_rate": 9.427280128266049e-06, + "loss": 2.0969, + "step": 488 + }, + { + "epoch": 0.6621530128639134, + "grad_norm": 1.116706571792338, + "learning_rate": 9.424770734117206e-06, + "loss": 2.0978, + "step": 489 + }, + { + "epoch": 0.6635071090047393, + "grad_norm": 1.0731484382258585, + "learning_rate": 9.42225618996236e-06, + "loss": 2.2606, + "step": 490 + }, + { + "epoch": 0.6648612051455653, + "grad_norm": 1.0217610153976007, + "learning_rate": 9.419736498728203e-06, + "loss": 2.0359, + "step": 491 + }, + { + "epoch": 0.6662153012863913, + "grad_norm": 0.9470105904564678, + "learning_rate": 9.417211663347407e-06, + "loss": 2.3133, + "step": 492 + }, + { + "epoch": 0.6675693974272173, + "grad_norm": 1.1496175064197292, + "learning_rate": 9.414681686758645e-06, + "loss": 2.6914, + "step": 493 + }, + { + "epoch": 0.6689234935680434, + "grad_norm": 1.1088711952773571, + "learning_rate": 9.412146571906556e-06, + "loss": 2.1629, + "step": 494 + }, + { + "epoch": 0.6702775897088693, + "grad_norm": 2.4048301880979315, + "learning_rate": 9.409606321741776e-06, + "loss": 2.3511, + "step": 495 + }, + { + "epoch": 0.6716316858496953, + "grad_norm": 1.0855961625720172, + "learning_rate": 9.407060939220907e-06, + "loss": 3.0198, + "step": 496 + }, + { + "epoch": 0.6729857819905213, + "grad_norm": 0.831939027060162, + "learning_rate": 9.404510427306533e-06, + "loss": 2.0721, + "step": 497 + }, + { + "epoch": 0.6743398781313473, + "grad_norm": 0.9189030221620978, + "learning_rate": 9.401954788967199e-06, + "loss": 2.1533, + "step": 498 + }, + { + "epoch": 0.6756939742721734, + "grad_norm": 1.1215682593702503, + "learning_rate": 9.39939402717742e-06, + "loss": 2.3017, + "step": 499 + }, + { + "epoch": 0.6770480704129993, + "grad_norm": 0.9997121388139094, + "learning_rate": 9.396828144917682e-06, + "loss": 2.1092, + "step": 
500 + }, + { + "epoch": 0.6784021665538253, + "grad_norm": 1.0419306713261767, + "learning_rate": 9.39425714517442e-06, + "loss": 2.3772, + "step": 501 + }, + { + "epoch": 0.6797562626946513, + "grad_norm": 0.8546730463318373, + "learning_rate": 9.391681030940031e-06, + "loss": 2.4692, + "step": 502 + }, + { + "epoch": 0.6811103588354773, + "grad_norm": 1.036666833583794, + "learning_rate": 9.389099805212862e-06, + "loss": 2.4967, + "step": 503 + }, + { + "epoch": 0.6824644549763034, + "grad_norm": 0.9028903653709767, + "learning_rate": 9.38651347099721e-06, + "loss": 2.2658, + "step": 504 + }, + { + "epoch": 0.6838185511171293, + "grad_norm": 1.004993430273087, + "learning_rate": 9.38392203130332e-06, + "loss": 2.2413, + "step": 505 + }, + { + "epoch": 0.6851726472579553, + "grad_norm": 4.244116799684234, + "learning_rate": 9.38132548914738e-06, + "loss": 2.2353, + "step": 506 + }, + { + "epoch": 0.6865267433987813, + "grad_norm": 2.0623005376684516, + "learning_rate": 9.37872384755151e-06, + "loss": 2.3859, + "step": 507 + }, + { + "epoch": 0.6878808395396073, + "grad_norm": 1.099884816816885, + "learning_rate": 9.376117109543769e-06, + "loss": 1.8825, + "step": 508 + }, + { + "epoch": 0.6892349356804333, + "grad_norm": 0.9736281444679528, + "learning_rate": 9.373505278158152e-06, + "loss": 2.4243, + "step": 509 + }, + { + "epoch": 0.6905890318212593, + "grad_norm": 0.9265991736695152, + "learning_rate": 9.370888356434577e-06, + "loss": 2.5581, + "step": 510 + }, + { + "epoch": 0.6919431279620853, + "grad_norm": 0.9742740668751364, + "learning_rate": 9.368266347418891e-06, + "loss": 2.2436, + "step": 511 + }, + { + "epoch": 0.6932972241029113, + "grad_norm": 0.903250624410521, + "learning_rate": 9.365639254162855e-06, + "loss": 2.3704, + "step": 512 + }, + { + "epoch": 0.6946513202437373, + "grad_norm": 1.011884451867896, + "learning_rate": 9.363007079724153e-06, + "loss": 2.3554, + "step": 513 + }, + { + "epoch": 0.6960054163845633, + "grad_norm": 2.1268078439425504, + "learning_rate": 9.360369827166385e-06, + "loss": 2.2531, + "step": 514 + }, + { + "epoch": 0.6973595125253893, + "grad_norm": 0.9226076560386388, + "learning_rate": 9.357727499559055e-06, + "loss": 2.1536, + "step": 515 + }, + { + "epoch": 0.6987136086662153, + "grad_norm": 0.9235717043788523, + "learning_rate": 9.355080099977579e-06, + "loss": 2.5795, + "step": 516 + }, + { + "epoch": 0.7000677048070413, + "grad_norm": 0.997624756104421, + "learning_rate": 9.352427631503274e-06, + "loss": 2.4086, + "step": 517 + }, + { + "epoch": 0.7014218009478673, + "grad_norm": 1.0325935042936567, + "learning_rate": 9.349770097223356e-06, + "loss": 2.1905, + "step": 518 + }, + { + "epoch": 0.7027758970886933, + "grad_norm": 0.9779488073579523, + "learning_rate": 9.347107500230941e-06, + "loss": 2.1902, + "step": 519 + }, + { + "epoch": 0.7041299932295193, + "grad_norm": 0.9071035111230149, + "learning_rate": 9.344439843625034e-06, + "loss": 2.4966, + "step": 520 + }, + { + "epoch": 0.7054840893703453, + "grad_norm": 0.9294082146423078, + "learning_rate": 9.341767130510529e-06, + "loss": 2.1556, + "step": 521 + }, + { + "epoch": 0.7068381855111713, + "grad_norm": 0.8010254757576656, + "learning_rate": 9.339089363998206e-06, + "loss": 2.2359, + "step": 522 + }, + { + "epoch": 0.7081922816519973, + "grad_norm": 1.2492381753400523, + "learning_rate": 9.336406547204726e-06, + "loss": 2.0768, + "step": 523 + }, + { + "epoch": 0.7095463777928233, + "grad_norm": 0.960396750578361, + "learning_rate": 9.333718683252631e-06, + "loss": 2.2373, + 
"step": 524 + }, + { + "epoch": 0.7109004739336493, + "grad_norm": 0.9089783184513304, + "learning_rate": 9.331025775270335e-06, + "loss": 2.0008, + "step": 525 + }, + { + "epoch": 0.7122545700744752, + "grad_norm": 1.0902851866790608, + "learning_rate": 9.32832782639212e-06, + "loss": 2.452, + "step": 526 + }, + { + "epoch": 0.7136086662153013, + "grad_norm": 1.0819110431428438, + "learning_rate": 9.325624839758142e-06, + "loss": 2.1797, + "step": 527 + }, + { + "epoch": 0.7149627623561273, + "grad_norm": 1.035728859841243, + "learning_rate": 9.322916818514414e-06, + "loss": 2.0016, + "step": 528 + }, + { + "epoch": 0.7163168584969533, + "grad_norm": 0.8429686871803489, + "learning_rate": 9.32020376581281e-06, + "loss": 2.1271, + "step": 529 + }, + { + "epoch": 0.7176709546377793, + "grad_norm": 0.948124830150651, + "learning_rate": 9.317485684811065e-06, + "loss": 2.463, + "step": 530 + }, + { + "epoch": 0.7190250507786052, + "grad_norm": 1.0966777201484916, + "learning_rate": 9.31476257867276e-06, + "loss": 2.7078, + "step": 531 + }, + { + "epoch": 0.7203791469194313, + "grad_norm": 0.9601018625399497, + "learning_rate": 9.312034450567331e-06, + "loss": 2.0091, + "step": 532 + }, + { + "epoch": 0.7217332430602573, + "grad_norm": 1.0195440951721928, + "learning_rate": 9.309301303670053e-06, + "loss": 2.2595, + "step": 533 + }, + { + "epoch": 0.7230873392010833, + "grad_norm": 1.0365088177530009, + "learning_rate": 9.306563141162046e-06, + "loss": 2.5509, + "step": 534 + }, + { + "epoch": 0.7244414353419093, + "grad_norm": 0.89557097021405, + "learning_rate": 9.303819966230265e-06, + "loss": 2.184, + "step": 535 + }, + { + "epoch": 0.7257955314827352, + "grad_norm": 1.1201396043943863, + "learning_rate": 9.301071782067504e-06, + "loss": 2.2719, + "step": 536 + }, + { + "epoch": 0.7271496276235613, + "grad_norm": 1.0416986531898793, + "learning_rate": 9.298318591872381e-06, + "loss": 2.9307, + "step": 537 + }, + { + "epoch": 0.7285037237643873, + "grad_norm": 0.8874369002318888, + "learning_rate": 9.295560398849348e-06, + "loss": 2.0185, + "step": 538 + }, + { + "epoch": 0.7298578199052133, + "grad_norm": 1.3062496312023244, + "learning_rate": 9.29279720620867e-06, + "loss": 2.4988, + "step": 539 + }, + { + "epoch": 0.7312119160460393, + "grad_norm": 0.8723686506256735, + "learning_rate": 9.290029017166439e-06, + "loss": 2.1358, + "step": 540 + }, + { + "epoch": 0.7325660121868652, + "grad_norm": 0.9558509794048232, + "learning_rate": 9.287255834944563e-06, + "loss": 2.5911, + "step": 541 + }, + { + "epoch": 0.7339201083276913, + "grad_norm": 0.9765924363071334, + "learning_rate": 9.284477662770753e-06, + "loss": 2.2083, + "step": 542 + }, + { + "epoch": 0.7352742044685172, + "grad_norm": 0.9439430068504877, + "learning_rate": 9.281694503878536e-06, + "loss": 2.2064, + "step": 543 + }, + { + "epoch": 0.7366283006093433, + "grad_norm": 1.1103666583629448, + "learning_rate": 9.278906361507238e-06, + "loss": 2.2118, + "step": 544 + }, + { + "epoch": 0.7379823967501693, + "grad_norm": 1.015972947443489, + "learning_rate": 9.276113238901992e-06, + "loss": 2.3865, + "step": 545 + }, + { + "epoch": 0.7393364928909952, + "grad_norm": 0.9787500482208973, + "learning_rate": 9.273315139313719e-06, + "loss": 2.0949, + "step": 546 + }, + { + "epoch": 0.7406905890318213, + "grad_norm": 1.1247301478380771, + "learning_rate": 9.270512065999139e-06, + "loss": 1.9341, + "step": 547 + }, + { + "epoch": 0.7420446851726472, + "grad_norm": 0.8630459962648358, + "learning_rate": 9.267704022220758e-06, + "loss": 
2.1934, + "step": 548 + }, + { + "epoch": 0.7433987813134733, + "grad_norm": 0.9456846452275267, + "learning_rate": 9.264891011246867e-06, + "loss": 2.1091, + "step": 549 + }, + { + "epoch": 0.7447528774542993, + "grad_norm": 0.9901864770446673, + "learning_rate": 9.26207303635154e-06, + "loss": 2.0778, + "step": 550 + }, + { + "epoch": 0.7461069735951252, + "grad_norm": 0.9659375715716257, + "learning_rate": 9.25925010081463e-06, + "loss": 2.2679, + "step": 551 + }, + { + "epoch": 0.7474610697359513, + "grad_norm": 0.9764027265044928, + "learning_rate": 9.256422207921757e-06, + "loss": 2.7468, + "step": 552 + }, + { + "epoch": 0.7488151658767772, + "grad_norm": 0.899162453540954, + "learning_rate": 9.25358936096432e-06, + "loss": 2.312, + "step": 553 + }, + { + "epoch": 0.7501692620176033, + "grad_norm": 0.7884101704032132, + "learning_rate": 9.250751563239473e-06, + "loss": 1.8254, + "step": 554 + }, + { + "epoch": 0.7515233581584293, + "grad_norm": 1.0492022449068705, + "learning_rate": 9.247908818050146e-06, + "loss": 2.6885, + "step": 555 + }, + { + "epoch": 0.7528774542992552, + "grad_norm": 1.0133767522172044, + "learning_rate": 9.245061128705017e-06, + "loss": 2.2732, + "step": 556 + }, + { + "epoch": 0.7542315504400813, + "grad_norm": 1.1740684334910383, + "learning_rate": 9.242208498518522e-06, + "loss": 2.4037, + "step": 557 + }, + { + "epoch": 0.7555856465809072, + "grad_norm": 1.0034773672404191, + "learning_rate": 9.239350930810843e-06, + "loss": 2.6555, + "step": 558 + }, + { + "epoch": 0.7569397427217333, + "grad_norm": 1.0559371991027349, + "learning_rate": 9.236488428907919e-06, + "loss": 1.8506, + "step": 559 + }, + { + "epoch": 0.7582938388625592, + "grad_norm": 0.9422190702574703, + "learning_rate": 9.233620996141421e-06, + "loss": 2.393, + "step": 560 + }, + { + "epoch": 0.7596479350033852, + "grad_norm": 1.701435588173234, + "learning_rate": 9.230748635848768e-06, + "loss": 2.4996, + "step": 561 + }, + { + "epoch": 0.7610020311442113, + "grad_norm": 0.9848241894053293, + "learning_rate": 9.227871351373108e-06, + "loss": 2.2563, + "step": 562 + }, + { + "epoch": 0.7623561272850372, + "grad_norm": 0.9551626201609839, + "learning_rate": 9.224989146063322e-06, + "loss": 2.3359, + "step": 563 + }, + { + "epoch": 0.7637102234258633, + "grad_norm": 1.0501196818584786, + "learning_rate": 9.22210202327402e-06, + "loss": 2.3484, + "step": 564 + }, + { + "epoch": 0.7650643195666892, + "grad_norm": 1.0118693595069355, + "learning_rate": 9.219209986365533e-06, + "loss": 2.0198, + "step": 565 + }, + { + "epoch": 0.7664184157075152, + "grad_norm": 1.000596790716306, + "learning_rate": 9.216313038703914e-06, + "loss": 2.4147, + "step": 566 + }, + { + "epoch": 0.7677725118483413, + "grad_norm": 1.0925850682201712, + "learning_rate": 9.21341118366093e-06, + "loss": 2.0607, + "step": 567 + }, + { + "epoch": 0.7691266079891672, + "grad_norm": 0.8195614498636685, + "learning_rate": 9.21050442461406e-06, + "loss": 2.3161, + "step": 568 + }, + { + "epoch": 0.7704807041299933, + "grad_norm": 1.4095142340461897, + "learning_rate": 9.207592764946491e-06, + "loss": 2.3281, + "step": 569 + }, + { + "epoch": 0.7718348002708192, + "grad_norm": 1.0305488353359658, + "learning_rate": 9.204676208047112e-06, + "loss": 2.209, + "step": 570 + }, + { + "epoch": 0.7731888964116452, + "grad_norm": 0.9457457073106245, + "learning_rate": 9.201754757310518e-06, + "loss": 1.9644, + "step": 571 + }, + { + "epoch": 0.7745429925524713, + "grad_norm": 0.8798589503356994, + "learning_rate": 9.198828416136991e-06, 
+ "loss": 2.5588, + "step": 572 + }, + { + "epoch": 0.7758970886932972, + "grad_norm": 1.1021679000741063, + "learning_rate": 9.195897187932513e-06, + "loss": 2.2556, + "step": 573 + }, + { + "epoch": 0.7772511848341233, + "grad_norm": 1.2881391770879362, + "learning_rate": 9.192961076108748e-06, + "loss": 2.2786, + "step": 574 + }, + { + "epoch": 0.7786052809749492, + "grad_norm": 0.8079890380239451, + "learning_rate": 9.190020084083048e-06, + "loss": 1.9979, + "step": 575 + }, + { + "epoch": 0.7799593771157752, + "grad_norm": 2.3299606691457706, + "learning_rate": 9.187074215278444e-06, + "loss": 2.3944, + "step": 576 + }, + { + "epoch": 0.7813134732566012, + "grad_norm": 0.8875887654620506, + "learning_rate": 9.184123473123643e-06, + "loss": 3.159, + "step": 577 + }, + { + "epoch": 0.7826675693974272, + "grad_norm": 1.2732756612102565, + "learning_rate": 9.181167861053024e-06, + "loss": 2.5586, + "step": 578 + }, + { + "epoch": 0.7840216655382533, + "grad_norm": 1.1186141048074447, + "learning_rate": 9.178207382506634e-06, + "loss": 2.3656, + "step": 579 + }, + { + "epoch": 0.7853757616790792, + "grad_norm": 1.0038802401880105, + "learning_rate": 9.175242040930185e-06, + "loss": 2.3621, + "step": 580 + }, + { + "epoch": 0.7867298578199052, + "grad_norm": 1.0084194487112716, + "learning_rate": 9.172271839775046e-06, + "loss": 2.2325, + "step": 581 + }, + { + "epoch": 0.7880839539607312, + "grad_norm": 0.8495420286871743, + "learning_rate": 9.169296782498249e-06, + "loss": 2.4249, + "step": 582 + }, + { + "epoch": 0.7894380501015572, + "grad_norm": 1.0108960715157491, + "learning_rate": 9.166316872562467e-06, + "loss": 2.0571, + "step": 583 + }, + { + "epoch": 0.7907921462423833, + "grad_norm": 0.7599417123654713, + "learning_rate": 9.163332113436031e-06, + "loss": 2.2401, + "step": 584 + }, + { + "epoch": 0.7921462423832092, + "grad_norm": 0.9153720931911937, + "learning_rate": 9.160342508592916e-06, + "loss": 2.0296, + "step": 585 + }, + { + "epoch": 0.7935003385240352, + "grad_norm": 1.0606631317988464, + "learning_rate": 9.157348061512728e-06, + "loss": 1.995, + "step": 586 + }, + { + "epoch": 0.7948544346648612, + "grad_norm": 0.9696771933921041, + "learning_rate": 9.154348775680714e-06, + "loss": 2.4553, + "step": 587 + }, + { + "epoch": 0.7962085308056872, + "grad_norm": 1.1359820431460632, + "learning_rate": 9.151344654587758e-06, + "loss": 2.6147, + "step": 588 + }, + { + "epoch": 0.7975626269465133, + "grad_norm": 1.1620764175753133, + "learning_rate": 9.148335701730363e-06, + "loss": 2.1694, + "step": 589 + }, + { + "epoch": 0.7989167230873392, + "grad_norm": 1.0906791903175577, + "learning_rate": 9.145321920610662e-06, + "loss": 2.0159, + "step": 590 + }, + { + "epoch": 0.8002708192281652, + "grad_norm": 0.9262503779509312, + "learning_rate": 9.142303314736405e-06, + "loss": 1.8704, + "step": 591 + }, + { + "epoch": 0.8016249153689912, + "grad_norm": 0.8897378574716819, + "learning_rate": 9.139279887620955e-06, + "loss": 2.2212, + "step": 592 + }, + { + "epoch": 0.8029790115098172, + "grad_norm": 0.8785520958211444, + "learning_rate": 9.136251642783294e-06, + "loss": 2.2422, + "step": 593 + }, + { + "epoch": 0.8043331076506431, + "grad_norm": 1.4708082564367666, + "learning_rate": 9.133218583748002e-06, + "loss": 2.4264, + "step": 594 + }, + { + "epoch": 0.8056872037914692, + "grad_norm": 0.9127946818898369, + "learning_rate": 9.130180714045271e-06, + "loss": 1.985, + "step": 595 + }, + { + "epoch": 0.8070412999322952, + "grad_norm": 1.0273656011649834, + "learning_rate": 
9.127138037210884e-06, + "loss": 2.1752, + "step": 596 + }, + { + "epoch": 0.8083953960731212, + "grad_norm": 1.0442611846851202, + "learning_rate": 9.124090556786227e-06, + "loss": 2.2373, + "step": 597 + }, + { + "epoch": 0.8097494922139472, + "grad_norm": 1.0020569110815791, + "learning_rate": 9.12103827631827e-06, + "loss": 2.6107, + "step": 598 + }, + { + "epoch": 0.8111035883547731, + "grad_norm": 1.0433877333186745, + "learning_rate": 9.117981199359575e-06, + "loss": 2.2095, + "step": 599 + }, + { + "epoch": 0.8124576844955992, + "grad_norm": 0.9172575390929391, + "learning_rate": 9.114919329468283e-06, + "loss": 2.0521, + "step": 600 + }, + { + "epoch": 0.8138117806364252, + "grad_norm": 1.0426202271128402, + "learning_rate": 9.111852670208116e-06, + "loss": 1.8817, + "step": 601 + }, + { + "epoch": 0.8151658767772512, + "grad_norm": 1.1301209307772506, + "learning_rate": 9.108781225148369e-06, + "loss": 3.0088, + "step": 602 + }, + { + "epoch": 0.8165199729180772, + "grad_norm": 0.9133026858806589, + "learning_rate": 9.105704997863907e-06, + "loss": 2.5472, + "step": 603 + }, + { + "epoch": 0.8178740690589031, + "grad_norm": 1.1108413215075008, + "learning_rate": 9.102623991935163e-06, + "loss": 2.5752, + "step": 604 + }, + { + "epoch": 0.8192281651997292, + "grad_norm": 0.9702100994035456, + "learning_rate": 9.099538210948128e-06, + "loss": 2.4404, + "step": 605 + }, + { + "epoch": 0.8205822613405552, + "grad_norm": 0.9485690500451401, + "learning_rate": 9.096447658494357e-06, + "loss": 2.2257, + "step": 606 + }, + { + "epoch": 0.8219363574813812, + "grad_norm": 26.292288625318744, + "learning_rate": 9.09335233817095e-06, + "loss": 2.676, + "step": 607 + }, + { + "epoch": 0.8232904536222072, + "grad_norm": 0.9757213752439725, + "learning_rate": 9.090252253580565e-06, + "loss": 2.083, + "step": 608 + }, + { + "epoch": 0.8246445497630331, + "grad_norm": 0.977754402583003, + "learning_rate": 9.087147408331399e-06, + "loss": 2.396, + "step": 609 + }, + { + "epoch": 0.8259986459038592, + "grad_norm": 1.1224622683564631, + "learning_rate": 9.084037806037194e-06, + "loss": 2.5127, + "step": 610 + }, + { + "epoch": 0.8273527420446851, + "grad_norm": 1.0702372697619176, + "learning_rate": 9.080923450317226e-06, + "loss": 2.44, + "step": 611 + }, + { + "epoch": 0.8287068381855112, + "grad_norm": 0.9854940413106023, + "learning_rate": 9.077804344796302e-06, + "loss": 2.3179, + "step": 612 + }, + { + "epoch": 0.8300609343263372, + "grad_norm": 1.0317420348048132, + "learning_rate": 9.074680493104764e-06, + "loss": 2.2255, + "step": 613 + }, + { + "epoch": 0.8314150304671631, + "grad_norm": 1.2553199368667127, + "learning_rate": 9.071551898878471e-06, + "loss": 2.196, + "step": 614 + }, + { + "epoch": 0.8327691266079892, + "grad_norm": 0.9428355967192141, + "learning_rate": 9.068418565758805e-06, + "loss": 2.4896, + "step": 615 + }, + { + "epoch": 0.8341232227488151, + "grad_norm": 0.919605019517011, + "learning_rate": 9.065280497392663e-06, + "loss": 2.724, + "step": 616 + }, + { + "epoch": 0.8354773188896412, + "grad_norm": 0.9006367820269072, + "learning_rate": 9.062137697432457e-06, + "loss": 2.3021, + "step": 617 + }, + { + "epoch": 0.8368314150304672, + "grad_norm": 0.9254413019481625, + "learning_rate": 9.058990169536098e-06, + "loss": 2.8054, + "step": 618 + }, + { + "epoch": 0.8381855111712931, + "grad_norm": 0.9723010342216781, + "learning_rate": 9.055837917367006e-06, + "loss": 2.1242, + "step": 619 + }, + { + "epoch": 0.8395396073121192, + "grad_norm": 0.9646095993807202, + 
"learning_rate": 9.052680944594101e-06, + "loss": 2.7603, + "step": 620 + }, + { + "epoch": 0.8408937034529451, + "grad_norm": 1.3152197929562175, + "learning_rate": 9.049519254891793e-06, + "loss": 2.1762, + "step": 621 + }, + { + "epoch": 0.8422477995937712, + "grad_norm": 0.8097257743614227, + "learning_rate": 9.046352851939981e-06, + "loss": 2.4746, + "step": 622 + }, + { + "epoch": 0.8436018957345972, + "grad_norm": 0.9993019751259021, + "learning_rate": 9.043181739424054e-06, + "loss": 1.8778, + "step": 623 + }, + { + "epoch": 0.8449559918754231, + "grad_norm": 1.1394511401717762, + "learning_rate": 9.040005921034884e-06, + "loss": 2.2527, + "step": 624 + }, + { + "epoch": 0.8463100880162492, + "grad_norm": 1.0167098829317125, + "learning_rate": 9.036825400468814e-06, + "loss": 2.4564, + "step": 625 + }, + { + "epoch": 0.8476641841570751, + "grad_norm": 1.0799560174241793, + "learning_rate": 9.033640181427661e-06, + "loss": 2.1033, + "step": 626 + }, + { + "epoch": 0.8490182802979012, + "grad_norm": 1.174120607908624, + "learning_rate": 9.030450267618717e-06, + "loss": 2.4757, + "step": 627 + }, + { + "epoch": 0.8503723764387271, + "grad_norm": 1.050685050196375, + "learning_rate": 9.02725566275473e-06, + "loss": 2.238, + "step": 628 + }, + { + "epoch": 0.8517264725795531, + "grad_norm": 0.9338038071047584, + "learning_rate": 9.024056370553916e-06, + "loss": 2.1313, + "step": 629 + }, + { + "epoch": 0.8530805687203792, + "grad_norm": 1.156893096752152, + "learning_rate": 9.020852394739936e-06, + "loss": 2.2162, + "step": 630 + }, + { + "epoch": 0.8544346648612051, + "grad_norm": 1.2811232350478499, + "learning_rate": 9.017643739041914e-06, + "loss": 2.9062, + "step": 631 + }, + { + "epoch": 0.8557887610020312, + "grad_norm": 1.3380924542089319, + "learning_rate": 9.014430407194413e-06, + "loss": 2.4905, + "step": 632 + }, + { + "epoch": 0.8571428571428571, + "grad_norm": 1.068909763409773, + "learning_rate": 9.011212402937441e-06, + "loss": 1.973, + "step": 633 + }, + { + "epoch": 0.8584969532836831, + "grad_norm": 1.8932248755263266, + "learning_rate": 9.007989730016446e-06, + "loss": 2.4409, + "step": 634 + }, + { + "epoch": 0.8598510494245092, + "grad_norm": 1.2891934792560118, + "learning_rate": 9.004762392182307e-06, + "loss": 2.3235, + "step": 635 + }, + { + "epoch": 0.8612051455653351, + "grad_norm": 0.858242619324268, + "learning_rate": 9.001530393191334e-06, + "loss": 1.9865, + "step": 636 + }, + { + "epoch": 0.8625592417061612, + "grad_norm": 1.1164311194466396, + "learning_rate": 8.998293736805262e-06, + "loss": 2.3765, + "step": 637 + }, + { + "epoch": 0.8639133378469871, + "grad_norm": 1.2157891571025403, + "learning_rate": 8.995052426791247e-06, + "loss": 2.9348, + "step": 638 + }, + { + "epoch": 0.8652674339878131, + "grad_norm": 1.0909914707099782, + "learning_rate": 8.991806466921858e-06, + "loss": 2.8517, + "step": 639 + }, + { + "epoch": 0.8666215301286392, + "grad_norm": 0.9001967266754044, + "learning_rate": 8.988555860975082e-06, + "loss": 2.3269, + "step": 640 + }, + { + "epoch": 0.8679756262694651, + "grad_norm": 1.0105961392957552, + "learning_rate": 8.98530061273431e-06, + "loss": 2.101, + "step": 641 + }, + { + "epoch": 0.8693297224102912, + "grad_norm": 0.9677003303068539, + "learning_rate": 8.982040725988337e-06, + "loss": 2.2758, + "step": 642 + }, + { + "epoch": 0.8706838185511171, + "grad_norm": 0.9813548892015744, + "learning_rate": 8.978776204531354e-06, + "loss": 2.33, + "step": 643 + }, + { + "epoch": 0.8720379146919431, + "grad_norm": 
1.0150274086140143, + "learning_rate": 8.975507052162954e-06, + "loss": 2.3066, + "step": 644 + }, + { + "epoch": 0.8733920108327691, + "grad_norm": 1.0301765250355548, + "learning_rate": 8.972233272688107e-06, + "loss": 2.1587, + "step": 645 + }, + { + "epoch": 0.8747461069735951, + "grad_norm": 1.1970855668426001, + "learning_rate": 8.968954869917183e-06, + "loss": 2.3845, + "step": 646 + }, + { + "epoch": 0.8761002031144212, + "grad_norm": 1.1034962442803717, + "learning_rate": 8.965671847665925e-06, + "loss": 1.7817, + "step": 647 + }, + { + "epoch": 0.8774542992552471, + "grad_norm": 0.9455749614353804, + "learning_rate": 8.962384209755453e-06, + "loss": 1.8517, + "step": 648 + }, + { + "epoch": 0.8788083953960731, + "grad_norm": 0.9934755324282731, + "learning_rate": 8.95909196001226e-06, + "loss": 2.2541, + "step": 649 + }, + { + "epoch": 0.8801624915368991, + "grad_norm": 1.0284578656567072, + "learning_rate": 8.955795102268206e-06, + "loss": 2.3968, + "step": 650 + }, + { + "epoch": 0.8815165876777251, + "grad_norm": 1.037663714131099, + "learning_rate": 8.952493640360518e-06, + "loss": 2.6138, + "step": 651 + }, + { + "epoch": 0.8828706838185512, + "grad_norm": 1.0926666615727456, + "learning_rate": 8.949187578131777e-06, + "loss": 1.9031, + "step": 652 + }, + { + "epoch": 0.8842247799593771, + "grad_norm": 1.110544847466399, + "learning_rate": 8.945876919429922e-06, + "loss": 2.2067, + "step": 653 + }, + { + "epoch": 0.8855788761002031, + "grad_norm": 1.0868074118477926, + "learning_rate": 8.942561668108236e-06, + "loss": 2.3532, + "step": 654 + }, + { + "epoch": 0.8869329722410291, + "grad_norm": 1.1283691827694036, + "learning_rate": 8.939241828025356e-06, + "loss": 2.2511, + "step": 655 + }, + { + "epoch": 0.8882870683818551, + "grad_norm": 1.9053437775194237, + "learning_rate": 8.935917403045251e-06, + "loss": 2.1626, + "step": 656 + }, + { + "epoch": 0.8896411645226812, + "grad_norm": 1.2141944808281238, + "learning_rate": 8.932588397037236e-06, + "loss": 2.3409, + "step": 657 + }, + { + "epoch": 0.8909952606635071, + "grad_norm": 1.0922208185194722, + "learning_rate": 8.92925481387595e-06, + "loss": 1.7841, + "step": 658 + }, + { + "epoch": 0.8923493568043331, + "grad_norm": 1.5140227295690092, + "learning_rate": 8.92591665744136e-06, + "loss": 2.369, + "step": 659 + }, + { + "epoch": 0.8937034529451591, + "grad_norm": 0.8526913110142397, + "learning_rate": 8.922573931618763e-06, + "loss": 2.0121, + "step": 660 + }, + { + "epoch": 0.8950575490859851, + "grad_norm": 1.043736998424973, + "learning_rate": 8.919226640298763e-06, + "loss": 2.2168, + "step": 661 + }, + { + "epoch": 0.896411645226811, + "grad_norm": 0.8649236144416748, + "learning_rate": 8.915874787377289e-06, + "loss": 1.9414, + "step": 662 + }, + { + "epoch": 0.8977657413676371, + "grad_norm": 1.0840827090139065, + "learning_rate": 8.912518376755572e-06, + "loss": 3.087, + "step": 663 + }, + { + "epoch": 0.8991198375084631, + "grad_norm": 1.1561825732631927, + "learning_rate": 8.90915741234015e-06, + "loss": 2.1006, + "step": 664 + }, + { + "epoch": 0.9004739336492891, + "grad_norm": 0.9793291395752391, + "learning_rate": 8.905791898042861e-06, + "loss": 1.85, + "step": 665 + }, + { + "epoch": 0.9018280297901151, + "grad_norm": 1.2185211503560396, + "learning_rate": 8.902421837780839e-06, + "loss": 2.7844, + "step": 666 + }, + { + "epoch": 0.903182125930941, + "grad_norm": 1.0236959874012588, + "learning_rate": 8.89904723547651e-06, + "loss": 2.1372, + "step": 667 + }, + { + "epoch": 0.9045362220717671, + 
"grad_norm": 1.0598101921330767, + "learning_rate": 8.895668095057584e-06, + "loss": 2.413, + "step": 668 + }, + { + "epoch": 0.9058903182125931, + "grad_norm": 0.9973400634976877, + "learning_rate": 8.892284420457054e-06, + "loss": 2.1688, + "step": 669 + }, + { + "epoch": 0.9072444143534191, + "grad_norm": 1.11854939708825, + "learning_rate": 8.888896215613192e-06, + "loss": 2.3154, + "step": 670 + }, + { + "epoch": 0.9085985104942451, + "grad_norm": 1.0425526725686658, + "learning_rate": 8.885503484469539e-06, + "loss": 2.2115, + "step": 671 + }, + { + "epoch": 0.909952606635071, + "grad_norm": 0.9579083361963218, + "learning_rate": 8.88210623097491e-06, + "loss": 2.0552, + "step": 672 + }, + { + "epoch": 0.9113067027758971, + "grad_norm": 0.8237407553812768, + "learning_rate": 8.878704459083376e-06, + "loss": 2.8482, + "step": 673 + }, + { + "epoch": 0.9126607989167231, + "grad_norm": 0.8200385146933777, + "learning_rate": 8.875298172754274e-06, + "loss": 2.1437, + "step": 674 + }, + { + "epoch": 0.9140148950575491, + "grad_norm": 1.2074180626236117, + "learning_rate": 8.871887375952192e-06, + "loss": 2.0688, + "step": 675 + }, + { + "epoch": 0.9153689911983751, + "grad_norm": 1.2749052909022498, + "learning_rate": 8.868472072646968e-06, + "loss": 2.4633, + "step": 676 + }, + { + "epoch": 0.916723087339201, + "grad_norm": 1.219617129030152, + "learning_rate": 8.865052266813686e-06, + "loss": 2.3246, + "step": 677 + }, + { + "epoch": 0.9180771834800271, + "grad_norm": 0.981406917677342, + "learning_rate": 8.861627962432669e-06, + "loss": 2.2168, + "step": 678 + }, + { + "epoch": 0.919431279620853, + "grad_norm": 1.1941352888187047, + "learning_rate": 8.858199163489476e-06, + "loss": 2.0688, + "step": 679 + }, + { + "epoch": 0.9207853757616791, + "grad_norm": 1.0302188684262572, + "learning_rate": 8.854765873974898e-06, + "loss": 2.6023, + "step": 680 + }, + { + "epoch": 0.9221394719025051, + "grad_norm": 1.0315351443360778, + "learning_rate": 8.851328097884956e-06, + "loss": 2.4473, + "step": 681 + }, + { + "epoch": 0.923493568043331, + "grad_norm": 0.9270825061496434, + "learning_rate": 8.847885839220884e-06, + "loss": 2.3396, + "step": 682 + }, + { + "epoch": 0.9248476641841571, + "grad_norm": 1.5668051937578615, + "learning_rate": 8.844439101989145e-06, + "loss": 2.1618, + "step": 683 + }, + { + "epoch": 0.926201760324983, + "grad_norm": 1.063281721298216, + "learning_rate": 8.840987890201404e-06, + "loss": 2.0825, + "step": 684 + }, + { + "epoch": 0.9275558564658091, + "grad_norm": 5.49994743188365, + "learning_rate": 8.837532207874539e-06, + "loss": 2.3203, + "step": 685 + }, + { + "epoch": 0.9289099526066351, + "grad_norm": 0.805384496899143, + "learning_rate": 8.834072059030631e-06, + "loss": 2.2733, + "step": 686 + }, + { + "epoch": 0.930264048747461, + "grad_norm": 1.2279223754931932, + "learning_rate": 8.83060744769696e-06, + "loss": 2.519, + "step": 687 + }, + { + "epoch": 0.9316181448882871, + "grad_norm": 0.9224586328427765, + "learning_rate": 8.827138377905999e-06, + "loss": 2.8326, + "step": 688 + }, + { + "epoch": 0.932972241029113, + "grad_norm": 1.6089966176184487, + "learning_rate": 8.823664853695408e-06, + "loss": 1.6853, + "step": 689 + }, + { + "epoch": 0.9343263371699391, + "grad_norm": 0.8870008681490367, + "learning_rate": 8.820186879108038e-06, + "loss": 2.4482, + "step": 690 + }, + { + "epoch": 0.9356804333107651, + "grad_norm": 0.91606230626226, + "learning_rate": 8.816704458191913e-06, + "loss": 2.4619, + "step": 691 + }, + { + "epoch": 0.937034529451591, + 
"grad_norm": 1.103378118700351, + "learning_rate": 8.813217595000234e-06, + "loss": 2.0875, + "step": 692 + }, + { + "epoch": 0.9383886255924171, + "grad_norm": 1.1782658973751619, + "learning_rate": 8.809726293591376e-06, + "loss": 2.2522, + "step": 693 + }, + { + "epoch": 0.939742721733243, + "grad_norm": 3.1082108827531383, + "learning_rate": 8.806230558028874e-06, + "loss": 2.4234, + "step": 694 + }, + { + "epoch": 0.9410968178740691, + "grad_norm": 1.2771263022311399, + "learning_rate": 8.80273039238143e-06, + "loss": 2.2953, + "step": 695 + }, + { + "epoch": 0.942450914014895, + "grad_norm": 1.0851177832663736, + "learning_rate": 8.799225800722895e-06, + "loss": 2.1023, + "step": 696 + }, + { + "epoch": 0.943805010155721, + "grad_norm": 0.8982930726085163, + "learning_rate": 8.795716787132278e-06, + "loss": 2.261, + "step": 697 + }, + { + "epoch": 0.9451591062965471, + "grad_norm": 0.9710609235588211, + "learning_rate": 8.792203355693731e-06, + "loss": 2.0593, + "step": 698 + }, + { + "epoch": 0.946513202437373, + "grad_norm": 1.1589767335158037, + "learning_rate": 8.788685510496549e-06, + "loss": 2.2502, + "step": 699 + }, + { + "epoch": 0.9478672985781991, + "grad_norm": 0.923602460813735, + "learning_rate": 8.785163255635166e-06, + "loss": 2.1052, + "step": 700 + }, + { + "epoch": 0.949221394719025, + "grad_norm": 1.1331497754597915, + "learning_rate": 8.781636595209145e-06, + "loss": 2.2342, + "step": 701 + }, + { + "epoch": 0.950575490859851, + "grad_norm": 1.0911649916589345, + "learning_rate": 8.778105533323177e-06, + "loss": 2.5234, + "step": 702 + }, + { + "epoch": 0.9519295870006771, + "grad_norm": 0.7939604108123635, + "learning_rate": 8.77457007408708e-06, + "loss": 2.3947, + "step": 703 + }, + { + "epoch": 0.953283683141503, + "grad_norm": 0.9952541607703701, + "learning_rate": 8.771030221615786e-06, + "loss": 2.4793, + "step": 704 + }, + { + "epoch": 0.9546377792823291, + "grad_norm": 1.0107073531709874, + "learning_rate": 8.767485980029342e-06, + "loss": 2.0951, + "step": 705 + }, + { + "epoch": 0.955991875423155, + "grad_norm": 1.0100357247296368, + "learning_rate": 8.763937353452902e-06, + "loss": 2.1011, + "step": 706 + }, + { + "epoch": 0.957345971563981, + "grad_norm": 0.9804384454423593, + "learning_rate": 8.760384346016726e-06, + "loss": 1.7807, + "step": 707 + }, + { + "epoch": 0.9587000677048071, + "grad_norm": 1.948525190001482, + "learning_rate": 8.756826961856171e-06, + "loss": 2.2514, + "step": 708 + }, + { + "epoch": 0.960054163845633, + "grad_norm": 2.325650876165679, + "learning_rate": 8.75326520511169e-06, + "loss": 2.43, + "step": 709 + }, + { + "epoch": 0.9614082599864591, + "grad_norm": 1.4411906872847189, + "learning_rate": 8.74969907992882e-06, + "loss": 2.5396, + "step": 710 + }, + { + "epoch": 0.962762356127285, + "grad_norm": 1.1588565975941216, + "learning_rate": 8.746128590458191e-06, + "loss": 2.2853, + "step": 711 + }, + { + "epoch": 0.964116452268111, + "grad_norm": 1.018506381733888, + "learning_rate": 8.742553740855507e-06, + "loss": 2.3352, + "step": 712 + }, + { + "epoch": 0.965470548408937, + "grad_norm": 0.9885672968116678, + "learning_rate": 8.738974535281545e-06, + "loss": 2.3807, + "step": 713 + }, + { + "epoch": 0.966824644549763, + "grad_norm": 0.9360638976400335, + "learning_rate": 8.735390977902159e-06, + "loss": 2.522, + "step": 714 + }, + { + "epoch": 0.9681787406905891, + "grad_norm": 1.0138074790076992, + "learning_rate": 8.731803072888262e-06, + "loss": 2.0687, + "step": 715 + }, + { + "epoch": 0.969532836831415, + 
"grad_norm": 0.8593670335614861, + "learning_rate": 8.728210824415829e-06, + "loss": 2.3171, + "step": 716 + }, + { + "epoch": 0.970886932972241, + "grad_norm": 0.8782947340827698, + "learning_rate": 8.724614236665889e-06, + "loss": 2.1826, + "step": 717 + }, + { + "epoch": 0.972241029113067, + "grad_norm": 1.041877641264968, + "learning_rate": 8.721013313824527e-06, + "loss": 2.5999, + "step": 718 + }, + { + "epoch": 0.973595125253893, + "grad_norm": 1.0184265790630844, + "learning_rate": 8.717408060082865e-06, + "loss": 2.3649, + "step": 719 + }, + { + "epoch": 0.9749492213947191, + "grad_norm": 0.8805171555606713, + "learning_rate": 8.713798479637073e-06, + "loss": 2.5529, + "step": 720 + }, + { + "epoch": 0.976303317535545, + "grad_norm": 0.8250466080220131, + "learning_rate": 8.710184576688353e-06, + "loss": 2.1707, + "step": 721 + }, + { + "epoch": 0.977657413676371, + "grad_norm": 0.9735738962932305, + "learning_rate": 8.70656635544294e-06, + "loss": 1.9474, + "step": 722 + }, + { + "epoch": 0.979011509817197, + "grad_norm": 0.9574755177161404, + "learning_rate": 8.702943820112094e-06, + "loss": 2.0991, + "step": 723 + }, + { + "epoch": 0.980365605958023, + "grad_norm": 1.1008036232420575, + "learning_rate": 8.699316974912097e-06, + "loss": 2.6044, + "step": 724 + }, + { + "epoch": 0.9817197020988491, + "grad_norm": 1.0106221199017615, + "learning_rate": 8.695685824064246e-06, + "loss": 2.4405, + "step": 725 + }, + { + "epoch": 0.983073798239675, + "grad_norm": 1.167558559334278, + "learning_rate": 8.692050371794849e-06, + "loss": 2.6635, + "step": 726 + }, + { + "epoch": 0.984427894380501, + "grad_norm": 0.8542048524860696, + "learning_rate": 8.688410622335222e-06, + "loss": 2.5169, + "step": 727 + }, + { + "epoch": 0.985781990521327, + "grad_norm": 1.0044160146409207, + "learning_rate": 8.684766579921684e-06, + "loss": 2.477, + "step": 728 + }, + { + "epoch": 0.987136086662153, + "grad_norm": 0.9438656631341891, + "learning_rate": 8.681118248795548e-06, + "loss": 1.9811, + "step": 729 + }, + { + "epoch": 0.988490182802979, + "grad_norm": 0.9095360753452296, + "learning_rate": 8.677465633203117e-06, + "loss": 2.3501, + "step": 730 + }, + { + "epoch": 0.989844278943805, + "grad_norm": 1.5751129180562193, + "learning_rate": 8.673808737395686e-06, + "loss": 1.9711, + "step": 731 + }, + { + "epoch": 0.991198375084631, + "grad_norm": 1.0244041655385774, + "learning_rate": 8.670147565629526e-06, + "loss": 2.1411, + "step": 732 + }, + { + "epoch": 0.992552471225457, + "grad_norm": 1.0999564192877611, + "learning_rate": 8.66648212216589e-06, + "loss": 2.7566, + "step": 733 + }, + { + "epoch": 0.993906567366283, + "grad_norm": 1.0783864346910368, + "learning_rate": 8.662812411270997e-06, + "loss": 2.2272, + "step": 734 + }, + { + "epoch": 0.995260663507109, + "grad_norm": 1.011628347758097, + "learning_rate": 8.65913843721604e-06, + "loss": 2.5275, + "step": 735 + }, + { + "epoch": 0.996614759647935, + "grad_norm": 1.2637326655889927, + "learning_rate": 8.655460204277167e-06, + "loss": 2.7871, + "step": 736 + }, + { + "epoch": 0.997968855788761, + "grad_norm": 0.8940421539736993, + "learning_rate": 8.651777716735488e-06, + "loss": 2.8055, + "step": 737 + }, + { + "epoch": 0.999322951929587, + "grad_norm": 0.8894453146981818, + "learning_rate": 8.648090978877063e-06, + "loss": 2.2385, + "step": 738 + }, + { + "epoch": 1.000677048070413, + "grad_norm": 0.9588436493567126, + "learning_rate": 8.644399994992898e-06, + "loss": 2.1316, + "step": 739 + }, + { + "epoch": 1.001354096140826, + 
"grad_norm": 1.042596861537762, + "learning_rate": 8.640704769378943e-06, + "loss": 2.1595, + "step": 740 + }, + { + "epoch": 1.002708192281652, + "grad_norm": 0.9542700464860409, + "learning_rate": 8.637005306336083e-06, + "loss": 2.2683, + "step": 741 + }, + { + "epoch": 1.004062288422478, + "grad_norm": 1.0792626697240038, + "learning_rate": 8.633301610170136e-06, + "loss": 1.9535, + "step": 742 + }, + { + "epoch": 1.005416384563304, + "grad_norm": 0.9813632090639985, + "learning_rate": 8.629593685191846e-06, + "loss": 2.3908, + "step": 743 + }, + { + "epoch": 1.00677048070413, + "grad_norm": 1.0359954057575655, + "learning_rate": 8.625881535716883e-06, + "loss": 2.6186, + "step": 744 + }, + { + "epoch": 1.008124576844956, + "grad_norm": 0.9759453555493578, + "learning_rate": 8.622165166065828e-06, + "loss": 1.926, + "step": 745 + }, + { + "epoch": 1.009478672985782, + "grad_norm": 1.086383812605352, + "learning_rate": 8.618444580564175e-06, + "loss": 2.4117, + "step": 746 + }, + { + "epoch": 1.010832769126608, + "grad_norm": 1.066485208921513, + "learning_rate": 8.61471978354233e-06, + "loss": 2.2121, + "step": 747 + }, + { + "epoch": 1.012186865267434, + "grad_norm": 1.0075256921761013, + "learning_rate": 8.610990779335594e-06, + "loss": 2.2686, + "step": 748 + }, + { + "epoch": 1.01354096140826, + "grad_norm": 4.05838919324133, + "learning_rate": 8.60725757228417e-06, + "loss": 2.3371, + "step": 749 + }, + { + "epoch": 1.014895057549086, + "grad_norm": 1.0649638809266402, + "learning_rate": 8.603520166733153e-06, + "loss": 2.2376, + "step": 750 + }, + { + "epoch": 1.016249153689912, + "grad_norm": 0.9312921856108541, + "learning_rate": 8.599778567032515e-06, + "loss": 2.2602, + "step": 751 + }, + { + "epoch": 1.017603249830738, + "grad_norm": 1.013555848121877, + "learning_rate": 8.596032777537124e-06, + "loss": 2.2348, + "step": 752 + }, + { + "epoch": 1.018957345971564, + "grad_norm": 1.0949907267977028, + "learning_rate": 8.592282802606713e-06, + "loss": 2.2799, + "step": 753 + }, + { + "epoch": 1.02031144211239, + "grad_norm": 0.8061510705350392, + "learning_rate": 8.588528646605893e-06, + "loss": 2.1358, + "step": 754 + }, + { + "epoch": 1.021665538253216, + "grad_norm": 0.8912985567283312, + "learning_rate": 8.584770313904138e-06, + "loss": 2.1013, + "step": 755 + }, + { + "epoch": 1.0230196343940419, + "grad_norm": 0.938063703434926, + "learning_rate": 8.581007808875786e-06, + "loss": 2.4889, + "step": 756 + }, + { + "epoch": 1.024373730534868, + "grad_norm": 0.7577265350168257, + "learning_rate": 8.577241135900026e-06, + "loss": 1.8219, + "step": 757 + }, + { + "epoch": 1.025727826675694, + "grad_norm": 1.0606203403633938, + "learning_rate": 8.573470299360906e-06, + "loss": 2.4437, + "step": 758 + }, + { + "epoch": 1.02708192281652, + "grad_norm": 1.0551285967576851, + "learning_rate": 8.569695303647313e-06, + "loss": 2.1422, + "step": 759 + }, + { + "epoch": 1.028436018957346, + "grad_norm": 0.9409375423377818, + "learning_rate": 8.565916153152982e-06, + "loss": 2.1203, + "step": 760 + }, + { + "epoch": 1.0297901150981719, + "grad_norm": 1.1003263863457742, + "learning_rate": 8.562132852276474e-06, + "loss": 2.338, + "step": 761 + }, + { + "epoch": 1.031144211238998, + "grad_norm": 0.9457106296289286, + "learning_rate": 8.558345405421189e-06, + "loss": 1.9891, + "step": 762 + }, + { + "epoch": 1.032498307379824, + "grad_norm": 1.0094972443672454, + "learning_rate": 8.554553816995347e-06, + "loss": 2.064, + "step": 763 + }, + { + "epoch": 1.03385240352065, + "grad_norm": 
0.9931533657371708, + "learning_rate": 8.550758091411993e-06, + "loss": 2.5872, + "step": 764 + }, + { + "epoch": 1.035206499661476, + "grad_norm": 1.00970787542275, + "learning_rate": 8.546958233088986e-06, + "loss": 1.6308, + "step": 765 + }, + { + "epoch": 1.0365605958023019, + "grad_norm": 0.9046067132912955, + "learning_rate": 8.543154246448992e-06, + "loss": 2.4595, + "step": 766 + }, + { + "epoch": 1.037914691943128, + "grad_norm": 0.8869062626220933, + "learning_rate": 8.539346135919485e-06, + "loss": 2.2769, + "step": 767 + }, + { + "epoch": 1.039268788083954, + "grad_norm": 1.1369978829634468, + "learning_rate": 8.535533905932739e-06, + "loss": 2.2687, + "step": 768 + }, + { + "epoch": 1.04062288422478, + "grad_norm": 1.000069084621953, + "learning_rate": 8.531717560925819e-06, + "loss": 1.9722, + "step": 769 + }, + { + "epoch": 1.041976980365606, + "grad_norm": 1.1369770696046089, + "learning_rate": 8.527897105340587e-06, + "loss": 2.3523, + "step": 770 + }, + { + "epoch": 1.0433310765064319, + "grad_norm": 1.2290321379198472, + "learning_rate": 8.52407254362368e-06, + "loss": 2.2146, + "step": 771 + }, + { + "epoch": 1.044685172647258, + "grad_norm": 1.0959412542423825, + "learning_rate": 8.52024388022652e-06, + "loss": 2.6636, + "step": 772 + }, + { + "epoch": 1.046039268788084, + "grad_norm": 1.0258500561432276, + "learning_rate": 8.516411119605303e-06, + "loss": 2.8278, + "step": 773 + }, + { + "epoch": 1.04739336492891, + "grad_norm": 0.9866371420929005, + "learning_rate": 8.512574266220991e-06, + "loss": 2.5915, + "step": 774 + }, + { + "epoch": 1.048747461069736, + "grad_norm": 1.0612020165548255, + "learning_rate": 8.50873332453931e-06, + "loss": 2.4831, + "step": 775 + }, + { + "epoch": 1.0501015572105619, + "grad_norm": 0.9966103250455697, + "learning_rate": 8.504888299030748e-06, + "loss": 2.1428, + "step": 776 + }, + { + "epoch": 1.051455653351388, + "grad_norm": 1.0897590636471566, + "learning_rate": 8.501039194170542e-06, + "loss": 2.0724, + "step": 777 + }, + { + "epoch": 1.052809749492214, + "grad_norm": 1.0670474685447335, + "learning_rate": 8.497186014438677e-06, + "loss": 1.9695, + "step": 778 + }, + { + "epoch": 1.05416384563304, + "grad_norm": 1.1907785256874888, + "learning_rate": 8.493328764319886e-06, + "loss": 2.4858, + "step": 779 + }, + { + "epoch": 1.055517941773866, + "grad_norm": 1.2893351808110196, + "learning_rate": 8.489467448303633e-06, + "loss": 2.7964, + "step": 780 + }, + { + "epoch": 1.0568720379146919, + "grad_norm": 1.0302272518222444, + "learning_rate": 8.485602070884118e-06, + "loss": 2.3342, + "step": 781 + }, + { + "epoch": 1.058226134055518, + "grad_norm": 1.0206240865882117, + "learning_rate": 8.481732636560266e-06, + "loss": 2.4793, + "step": 782 + }, + { + "epoch": 1.059580230196344, + "grad_norm": 1.0121397248564201, + "learning_rate": 8.477859149835726e-06, + "loss": 1.8307, + "step": 783 + }, + { + "epoch": 1.06093432633717, + "grad_norm": 1.0578580561059918, + "learning_rate": 8.473981615218863e-06, + "loss": 2.1949, + "step": 784 + }, + { + "epoch": 1.062288422477996, + "grad_norm": 0.9969690177718763, + "learning_rate": 8.470100037222751e-06, + "loss": 2.1533, + "step": 785 + }, + { + "epoch": 1.0636425186188219, + "grad_norm": 1.1212678356984322, + "learning_rate": 8.466214420365176e-06, + "loss": 2.1861, + "step": 786 + }, + { + "epoch": 1.064996614759648, + "grad_norm": 0.9923066032834624, + "learning_rate": 8.462324769168618e-06, + "loss": 2.4372, + "step": 787 + }, + { + "epoch": 1.066350710900474, + "grad_norm": 
1.2404978604054542, + "learning_rate": 8.458431088160255e-06, + "loss": 2.0584, + "step": 788 + }, + { + "epoch": 1.0677048070413, + "grad_norm": 1.060465239616255, + "learning_rate": 8.454533381871957e-06, + "loss": 2.3261, + "step": 789 + }, + { + "epoch": 1.069058903182126, + "grad_norm": 1.0780439600876397, + "learning_rate": 8.450631654840279e-06, + "loss": 2.2541, + "step": 790 + }, + { + "epoch": 1.0704129993229519, + "grad_norm": 1.0543355817090738, + "learning_rate": 8.446725911606451e-06, + "loss": 2.3729, + "step": 791 + }, + { + "epoch": 1.071767095463778, + "grad_norm": 1.5929978362842498, + "learning_rate": 8.442816156716386e-06, + "loss": 2.2915, + "step": 792 + }, + { + "epoch": 1.073121191604604, + "grad_norm": 0.8559195523501567, + "learning_rate": 8.438902394720659e-06, + "loss": 1.9731, + "step": 793 + }, + { + "epoch": 1.07447528774543, + "grad_norm": 1.0220594117408883, + "learning_rate": 8.43498463017451e-06, + "loss": 2.0958, + "step": 794 + }, + { + "epoch": 1.0758293838862558, + "grad_norm": 1.0326806004121811, + "learning_rate": 8.431062867637838e-06, + "loss": 2.0738, + "step": 795 + }, + { + "epoch": 1.0771834800270819, + "grad_norm": 1.18276220665135, + "learning_rate": 8.4271371116752e-06, + "loss": 2.3479, + "step": 796 + }, + { + "epoch": 1.078537576167908, + "grad_norm": 1.1320006680638006, + "learning_rate": 8.423207366855794e-06, + "loss": 1.8838, + "step": 797 + }, + { + "epoch": 1.079891672308734, + "grad_norm": 0.8967800856440162, + "learning_rate": 8.419273637753463e-06, + "loss": 2.2114, + "step": 798 + }, + { + "epoch": 1.08124576844956, + "grad_norm": 0.9935120415012008, + "learning_rate": 8.415335928946691e-06, + "loss": 2.3093, + "step": 799 + }, + { + "epoch": 1.0825998645903858, + "grad_norm": 0.9813682594289855, + "learning_rate": 8.411394245018589e-06, + "loss": 2.2393, + "step": 800 + }, + { + "epoch": 1.0839539607312119, + "grad_norm": 1.0470052038748332, + "learning_rate": 8.407448590556895e-06, + "loss": 2.0396, + "step": 801 + }, + { + "epoch": 1.085308056872038, + "grad_norm": 1.0097113780081433, + "learning_rate": 8.403498970153973e-06, + "loss": 2.1874, + "step": 802 + }, + { + "epoch": 1.086662153012864, + "grad_norm": 0.9573725041864002, + "learning_rate": 8.399545388406798e-06, + "loss": 2.2734, + "step": 803 + }, + { + "epoch": 1.08801624915369, + "grad_norm": 0.9578847774313853, + "learning_rate": 8.39558784991696e-06, + "loss": 2.186, + "step": 804 + }, + { + "epoch": 1.0893703452945158, + "grad_norm": 0.9151868078460803, + "learning_rate": 8.391626359290648e-06, + "loss": 2.0554, + "step": 805 + }, + { + "epoch": 1.0907244414353419, + "grad_norm": 0.9630038514154003, + "learning_rate": 8.387660921138657e-06, + "loss": 1.6792, + "step": 806 + }, + { + "epoch": 1.092078537576168, + "grad_norm": 1.0284639983718127, + "learning_rate": 8.383691540076372e-06, + "loss": 2.795, + "step": 807 + }, + { + "epoch": 1.093432633716994, + "grad_norm": 1.0684029705952454, + "learning_rate": 8.379718220723772e-06, + "loss": 2.2904, + "step": 808 + }, + { + "epoch": 1.09478672985782, + "grad_norm": 1.0034018245502179, + "learning_rate": 8.375740967705418e-06, + "loss": 2.0063, + "step": 809 + }, + { + "epoch": 1.0961408259986458, + "grad_norm": 0.9850203635464737, + "learning_rate": 8.371759785650444e-06, + "loss": 2.0104, + "step": 810 + }, + { + "epoch": 1.0974949221394719, + "grad_norm": 1.0272152976204258, + "learning_rate": 8.367774679192566e-06, + "loss": 2.4944, + "step": 811 + }, + { + "epoch": 1.098849018280298, + "grad_norm": 
1.014673449942826, + "learning_rate": 8.363785652970062e-06, + "loss": 2.4625, + "step": 812 + }, + { + "epoch": 1.100203114421124, + "grad_norm": 0.9983178702745071, + "learning_rate": 8.359792711625773e-06, + "loss": 2.6321, + "step": 813 + }, + { + "epoch": 1.10155721056195, + "grad_norm": 1.2478151045690562, + "learning_rate": 8.355795859807097e-06, + "loss": 2.6152, + "step": 814 + }, + { + "epoch": 1.1029113067027758, + "grad_norm": 1.1326333593128552, + "learning_rate": 8.351795102165983e-06, + "loss": 2.3933, + "step": 815 + }, + { + "epoch": 1.1042654028436019, + "grad_norm": 1.2156960926171398, + "learning_rate": 8.34779044335893e-06, + "loss": 2.5298, + "step": 816 + }, + { + "epoch": 1.105619498984428, + "grad_norm": 1.0345252180165674, + "learning_rate": 8.343781888046971e-06, + "loss": 2.0308, + "step": 817 + }, + { + "epoch": 1.106973595125254, + "grad_norm": 0.8846702899770009, + "learning_rate": 8.339769440895678e-06, + "loss": 2.0716, + "step": 818 + }, + { + "epoch": 1.10832769126608, + "grad_norm": 0.9633796785097113, + "learning_rate": 8.335753106575153e-06, + "loss": 1.9232, + "step": 819 + }, + { + "epoch": 1.1096817874069058, + "grad_norm": 1.0958725343390576, + "learning_rate": 8.331732889760021e-06, + "loss": 2.2685, + "step": 820 + }, + { + "epoch": 1.1110358835477319, + "grad_norm": 0.9434748502922011, + "learning_rate": 8.327708795129426e-06, + "loss": 2.5492, + "step": 821 + }, + { + "epoch": 1.112389979688558, + "grad_norm": 1.0218749260607791, + "learning_rate": 8.323680827367027e-06, + "loss": 2.1566, + "step": 822 + }, + { + "epoch": 1.113744075829384, + "grad_norm": 0.9991712465528062, + "learning_rate": 8.319648991160987e-06, + "loss": 1.9646, + "step": 823 + }, + { + "epoch": 1.1150981719702098, + "grad_norm": 0.7872947693815001, + "learning_rate": 8.315613291203977e-06, + "loss": 2.1754, + "step": 824 + }, + { + "epoch": 1.1164522681110358, + "grad_norm": 1.0951406212655457, + "learning_rate": 8.311573732193162e-06, + "loss": 2.3643, + "step": 825 + }, + { + "epoch": 1.1178063642518619, + "grad_norm": 1.162631966134576, + "learning_rate": 8.307530318830195e-06, + "loss": 2.447, + "step": 826 + }, + { + "epoch": 1.119160460392688, + "grad_norm": 1.1057256649020668, + "learning_rate": 8.303483055821222e-06, + "loss": 2.037, + "step": 827 + }, + { + "epoch": 1.120514556533514, + "grad_norm": 1.3428986372890304, + "learning_rate": 8.299431947876868e-06, + "loss": 2.3625, + "step": 828 + }, + { + "epoch": 1.12186865267434, + "grad_norm": 1.1140355194533162, + "learning_rate": 8.29537699971223e-06, + "loss": 2.6255, + "step": 829 + }, + { + "epoch": 1.1232227488151658, + "grad_norm": 1.3177560768420005, + "learning_rate": 8.291318216046874e-06, + "loss": 2.193, + "step": 830 + }, + { + "epoch": 1.1245768449559919, + "grad_norm": 1.0528043127642381, + "learning_rate": 8.287255601604834e-06, + "loss": 1.8949, + "step": 831 + }, + { + "epoch": 1.125930941096818, + "grad_norm": 1.1208525533921565, + "learning_rate": 8.283189161114602e-06, + "loss": 2.8527, + "step": 832 + }, + { + "epoch": 1.127285037237644, + "grad_norm": 1.0730370677327041, + "learning_rate": 8.279118899309121e-06, + "loss": 2.4503, + "step": 833 + }, + { + "epoch": 1.1286391333784698, + "grad_norm": 0.9202040373418652, + "learning_rate": 8.275044820925784e-06, + "loss": 2.066, + "step": 834 + }, + { + "epoch": 1.1299932295192958, + "grad_norm": 1.0579688082270045, + "learning_rate": 8.270966930706424e-06, + "loss": 2.9167, + "step": 835 + }, + { + "epoch": 1.1313473256601219, + "grad_norm": 
1.0848914875005917, + "learning_rate": 8.266885233397312e-06, + "loss": 2.8931, + "step": 836 + }, + { + "epoch": 1.132701421800948, + "grad_norm": 0.9167254802739708, + "learning_rate": 8.262799733749147e-06, + "loss": 2.5057, + "step": 837 + }, + { + "epoch": 1.134055517941774, + "grad_norm": 1.066012346340753, + "learning_rate": 8.25871043651706e-06, + "loss": 2.3063, + "step": 838 + }, + { + "epoch": 1.1354096140825998, + "grad_norm": 1.0335819197341622, + "learning_rate": 8.254617346460598e-06, + "loss": 2.333, + "step": 839 + }, + { + "epoch": 1.1367637102234258, + "grad_norm": 1.2740330227860488, + "learning_rate": 8.250520468343722e-06, + "loss": 2.1914, + "step": 840 + }, + { + "epoch": 1.1381178063642519, + "grad_norm": 0.9819332138988384, + "learning_rate": 8.2464198069348e-06, + "loss": 2.2113, + "step": 841 + }, + { + "epoch": 1.139471902505078, + "grad_norm": 0.876882108695887, + "learning_rate": 8.24231536700661e-06, + "loss": 2.0397, + "step": 842 + }, + { + "epoch": 1.140825998645904, + "grad_norm": 1.0295962020138234, + "learning_rate": 8.238207153336322e-06, + "loss": 2.0076, + "step": 843 + }, + { + "epoch": 1.1421800947867298, + "grad_norm": 0.934240502035731, + "learning_rate": 8.2340951707055e-06, + "loss": 2.1013, + "step": 844 + }, + { + "epoch": 1.1435341909275558, + "grad_norm": 1.0551844052623185, + "learning_rate": 8.229979423900095e-06, + "loss": 2.0243, + "step": 845 + }, + { + "epoch": 1.1448882870683819, + "grad_norm": 1.0967095459364826, + "learning_rate": 8.22585991771044e-06, + "loss": 2.1247, + "step": 846 + }, + { + "epoch": 1.146242383209208, + "grad_norm": 1.752846560905714, + "learning_rate": 8.22173665693124e-06, + "loss": 2.0735, + "step": 847 + }, + { + "epoch": 1.147596479350034, + "grad_norm": 1.3734604707116838, + "learning_rate": 8.217609646361574e-06, + "loss": 1.8235, + "step": 848 + }, + { + "epoch": 1.1489505754908598, + "grad_norm": 1.1360548028045079, + "learning_rate": 8.213478890804884e-06, + "loss": 2.4021, + "step": 849 + }, + { + "epoch": 1.1503046716316858, + "grad_norm": 1.0221199810893757, + "learning_rate": 8.209344395068972e-06, + "loss": 1.9002, + "step": 850 + }, + { + "epoch": 1.1516587677725119, + "grad_norm": 0.8652009856618762, + "learning_rate": 8.20520616396599e-06, + "loss": 1.6506, + "step": 851 + }, + { + "epoch": 1.153012863913338, + "grad_norm": 0.8365704134586867, + "learning_rate": 8.20106420231244e-06, + "loss": 2.4525, + "step": 852 + }, + { + "epoch": 1.1543669600541637, + "grad_norm": 1.1184140556213078, + "learning_rate": 8.196918514929166e-06, + "loss": 1.9014, + "step": 853 + }, + { + "epoch": 1.1557210561949898, + "grad_norm": 1.0082190304094705, + "learning_rate": 8.192769106641349e-06, + "loss": 2.3114, + "step": 854 + }, + { + "epoch": 1.1570751523358158, + "grad_norm": 1.2193028904213525, + "learning_rate": 8.188615982278496e-06, + "loss": 2.6133, + "step": 855 + }, + { + "epoch": 1.1584292484766419, + "grad_norm": 1.361443418854842, + "learning_rate": 8.184459146674447e-06, + "loss": 2.4022, + "step": 856 + }, + { + "epoch": 1.159783344617468, + "grad_norm": 1.1918066000555871, + "learning_rate": 8.180298604667354e-06, + "loss": 2.4705, + "step": 857 + }, + { + "epoch": 1.161137440758294, + "grad_norm": 1.061895871052199, + "learning_rate": 8.17613436109969e-06, + "loss": 2.2618, + "step": 858 + }, + { + "epoch": 1.1624915368991198, + "grad_norm": 1.107285284892013, + "learning_rate": 8.171966420818227e-06, + "loss": 2.2433, + "step": 859 + }, + { + "epoch": 1.1638456330399458, + "grad_norm": 
1.1870517898017734, + "learning_rate": 8.167794788674051e-06, + "loss": 2.2089, + "step": 860 + }, + { + "epoch": 1.1651997291807719, + "grad_norm": 0.9482677548752911, + "learning_rate": 8.163619469522537e-06, + "loss": 2.2702, + "step": 861 + }, + { + "epoch": 1.166553825321598, + "grad_norm": 0.9617964781357382, + "learning_rate": 8.159440468223352e-06, + "loss": 1.9168, + "step": 862 + }, + { + "epoch": 1.1679079214624237, + "grad_norm": 0.9780264000997955, + "learning_rate": 8.155257789640452e-06, + "loss": 2.1226, + "step": 863 + }, + { + "epoch": 1.1692620176032498, + "grad_norm": 1.0030932897097675, + "learning_rate": 8.15107143864207e-06, + "loss": 2.2428, + "step": 864 + }, + { + "epoch": 1.1706161137440758, + "grad_norm": 1.0659315727721794, + "learning_rate": 8.146881420100715e-06, + "loss": 2.3583, + "step": 865 + }, + { + "epoch": 1.1719702098849019, + "grad_norm": 1.1995965464770133, + "learning_rate": 8.142687738893161e-06, + "loss": 2.1148, + "step": 866 + }, + { + "epoch": 1.173324306025728, + "grad_norm": 1.0100674325958858, + "learning_rate": 8.138490399900454e-06, + "loss": 1.7792, + "step": 867 + }, + { + "epoch": 1.174678402166554, + "grad_norm": 1.1528480305040367, + "learning_rate": 8.134289408007887e-06, + "loss": 2.2195, + "step": 868 + }, + { + "epoch": 1.1760324983073798, + "grad_norm": 0.9508957577380588, + "learning_rate": 8.130084768105011e-06, + "loss": 1.5612, + "step": 869 + }, + { + "epoch": 1.1773865944482058, + "grad_norm": 1.0003595585872833, + "learning_rate": 8.12587648508562e-06, + "loss": 1.8605, + "step": 870 + }, + { + "epoch": 1.1787406905890319, + "grad_norm": 1.1420205399905372, + "learning_rate": 8.12166456384775e-06, + "loss": 2.0476, + "step": 871 + }, + { + "epoch": 1.180094786729858, + "grad_norm": 1.091147689337014, + "learning_rate": 8.117449009293668e-06, + "loss": 2.238, + "step": 872 + }, + { + "epoch": 1.1814488828706837, + "grad_norm": 1.1033755210296698, + "learning_rate": 8.113229826329876e-06, + "loss": 2.3103, + "step": 873 + }, + { + "epoch": 1.1828029790115098, + "grad_norm": 0.9843912490480584, + "learning_rate": 8.109007019867097e-06, + "loss": 2.2559, + "step": 874 + }, + { + "epoch": 1.1841570751523358, + "grad_norm": 1.0569456356202054, + "learning_rate": 8.104780594820265e-06, + "loss": 2.0617, + "step": 875 + }, + { + "epoch": 1.1855111712931619, + "grad_norm": 0.9431626809735268, + "learning_rate": 8.100550556108534e-06, + "loss": 2.216, + "step": 876 + }, + { + "epoch": 1.186865267433988, + "grad_norm": 0.9619228085042159, + "learning_rate": 8.09631690865526e-06, + "loss": 2.6777, + "step": 877 + }, + { + "epoch": 1.1882193635748137, + "grad_norm": 1.1921389359070207, + "learning_rate": 8.092079657388e-06, + "loss": 2.1743, + "step": 878 + }, + { + "epoch": 1.1895734597156398, + "grad_norm": 0.9898902262989339, + "learning_rate": 8.087838807238506e-06, + "loss": 2.0959, + "step": 879 + }, + { + "epoch": 1.1909275558564658, + "grad_norm": 1.143265112227052, + "learning_rate": 8.083594363142717e-06, + "loss": 1.7081, + "step": 880 + }, + { + "epoch": 1.1922816519972919, + "grad_norm": 1.036462321572088, + "learning_rate": 8.079346330040757e-06, + "loss": 1.7609, + "step": 881 + }, + { + "epoch": 1.193635748138118, + "grad_norm": 1.0246337259282456, + "learning_rate": 8.075094712876925e-06, + "loss": 2.4473, + "step": 882 + }, + { + "epoch": 1.1949898442789437, + "grad_norm": 1.1209881301108675, + "learning_rate": 8.070839516599695e-06, + "loss": 1.7249, + "step": 883 + }, + { + "epoch": 1.1963439404197698, + 
"grad_norm": 1.128942962423783, + "learning_rate": 8.066580746161705e-06, + "loss": 2.8118, + "step": 884 + }, + { + "epoch": 1.1976980365605958, + "grad_norm": 1.1416681649771092, + "learning_rate": 8.062318406519751e-06, + "loss": 2.3983, + "step": 885 + }, + { + "epoch": 1.1990521327014219, + "grad_norm": 1.1873758496607623, + "learning_rate": 8.058052502634786e-06, + "loss": 1.9744, + "step": 886 + }, + { + "epoch": 1.200406228842248, + "grad_norm": 1.0805673821547641, + "learning_rate": 8.053783039471909e-06, + "loss": 2.4546, + "step": 887 + }, + { + "epoch": 1.2017603249830737, + "grad_norm": 1.0642882838498282, + "learning_rate": 8.049510022000365e-06, + "loss": 2.063, + "step": 888 + }, + { + "epoch": 1.2031144211238998, + "grad_norm": 1.0624621525792766, + "learning_rate": 8.045233455193531e-06, + "loss": 2.1409, + "step": 889 + }, + { + "epoch": 1.2044685172647258, + "grad_norm": 1.2346691816252744, + "learning_rate": 8.040953344028921e-06, + "loss": 2.0547, + "step": 890 + }, + { + "epoch": 1.2058226134055519, + "grad_norm": 0.9557989229824211, + "learning_rate": 8.03666969348817e-06, + "loss": 1.9712, + "step": 891 + }, + { + "epoch": 1.2071767095463777, + "grad_norm": 0.8441961698009585, + "learning_rate": 8.032382508557033e-06, + "loss": 2.2598, + "step": 892 + }, + { + "epoch": 1.2085308056872037, + "grad_norm": 1.009678392621935, + "learning_rate": 8.02809179422538e-06, + "loss": 2.2036, + "step": 893 + }, + { + "epoch": 1.2098849018280298, + "grad_norm": 1.4057498442542968, + "learning_rate": 8.023797555487188e-06, + "loss": 2.648, + "step": 894 + }, + { + "epoch": 1.2112389979688558, + "grad_norm": 1.1266529635992393, + "learning_rate": 8.019499797340537e-06, + "loss": 2.0414, + "step": 895 + }, + { + "epoch": 1.2125930941096819, + "grad_norm": 1.2136121598603382, + "learning_rate": 8.015198524787603e-06, + "loss": 2.2459, + "step": 896 + }, + { + "epoch": 1.213947190250508, + "grad_norm": 1.1213074117332114, + "learning_rate": 8.01089374283465e-06, + "loss": 2.2054, + "step": 897 + }, + { + "epoch": 1.2153012863913337, + "grad_norm": 1.0818726596445376, + "learning_rate": 8.00658545649203e-06, + "loss": 2.2397, + "step": 898 + }, + { + "epoch": 1.2166553825321598, + "grad_norm": 1.0597814871253108, + "learning_rate": 8.002273670774172e-06, + "loss": 2.4344, + "step": 899 + }, + { + "epoch": 1.2180094786729858, + "grad_norm": 1.0180557577675222, + "learning_rate": 7.99795839069958e-06, + "loss": 1.9542, + "step": 900 + }, + { + "epoch": 1.2193635748138119, + "grad_norm": 1.0732743540144358, + "learning_rate": 7.99363962129082e-06, + "loss": 1.9053, + "step": 901 + }, + { + "epoch": 1.2207176709546377, + "grad_norm": 1.1464961982270008, + "learning_rate": 7.989317367574528e-06, + "loss": 2.2012, + "step": 902 + }, + { + "epoch": 1.2220717670954637, + "grad_norm": 0.990300248509241, + "learning_rate": 7.984991634581384e-06, + "loss": 1.9006, + "step": 903 + }, + { + "epoch": 1.2234258632362898, + "grad_norm": 1.0378186156083944, + "learning_rate": 7.980662427346127e-06, + "loss": 1.8861, + "step": 904 + }, + { + "epoch": 1.2247799593771158, + "grad_norm": 4.430236914366399, + "learning_rate": 7.976329750907536e-06, + "loss": 2.1467, + "step": 905 + }, + { + "epoch": 1.2261340555179419, + "grad_norm": 1.0098877057349103, + "learning_rate": 7.971993610308428e-06, + "loss": 2.0393, + "step": 906 + }, + { + "epoch": 1.2274881516587677, + "grad_norm": 1.0949603281546505, + "learning_rate": 7.967654010595653e-06, + "loss": 2.2995, + "step": 907 + }, + { + "epoch": 
1.2288422477995937, + "grad_norm": 1.1108897890970795, + "learning_rate": 7.963310956820085e-06, + "loss": 2.2542, + "step": 908 + }, + { + "epoch": 1.2301963439404198, + "grad_norm": 1.1882474635276283, + "learning_rate": 7.95896445403662e-06, + "loss": 1.9627, + "step": 909 + }, + { + "epoch": 1.2315504400812458, + "grad_norm": 1.0695557890668739, + "learning_rate": 7.954614507304166e-06, + "loss": 2.8768, + "step": 910 + }, + { + "epoch": 1.2329045362220719, + "grad_norm": 1.1617487541356466, + "learning_rate": 7.950261121685642e-06, + "loss": 2.1828, + "step": 911 + }, + { + "epoch": 1.2342586323628977, + "grad_norm": 1.1384341697301983, + "learning_rate": 7.945904302247968e-06, + "loss": 2.3497, + "step": 912 + }, + { + "epoch": 1.2356127285037237, + "grad_norm": 1.0784446459158834, + "learning_rate": 7.941544054062065e-06, + "loss": 2.2683, + "step": 913 + }, + { + "epoch": 1.2369668246445498, + "grad_norm": 1.038286976843538, + "learning_rate": 7.937180382202836e-06, + "loss": 2.2668, + "step": 914 + }, + { + "epoch": 1.2383209207853758, + "grad_norm": 1.0029564115724932, + "learning_rate": 7.932813291749177e-06, + "loss": 2.0261, + "step": 915 + }, + { + "epoch": 1.2396750169262019, + "grad_norm": 1.0333541710726606, + "learning_rate": 7.928442787783958e-06, + "loss": 2.2212, + "step": 916 + }, + { + "epoch": 1.2410291130670277, + "grad_norm": 1.03717078747682, + "learning_rate": 7.924068875394024e-06, + "loss": 2.0631, + "step": 917 + }, + { + "epoch": 1.2423832092078537, + "grad_norm": 0.9341937079507571, + "learning_rate": 7.919691559670188e-06, + "loss": 2.0421, + "step": 918 + }, + { + "epoch": 1.2437373053486798, + "grad_norm": 1.1684039509398019, + "learning_rate": 7.915310845707223e-06, + "loss": 2.0931, + "step": 919 + }, + { + "epoch": 1.2450914014895058, + "grad_norm": 0.9994376608594767, + "learning_rate": 7.910926738603855e-06, + "loss": 2.6169, + "step": 920 + }, + { + "epoch": 1.2464454976303316, + "grad_norm": 1.3781109917383612, + "learning_rate": 7.906539243462763e-06, + "loss": 2.8901, + "step": 921 + }, + { + "epoch": 1.2477995937711577, + "grad_norm": 1.1519867240702086, + "learning_rate": 7.902148365390567e-06, + "loss": 2.3781, + "step": 922 + }, + { + "epoch": 1.2491536899119837, + "grad_norm": 1.2921377062772406, + "learning_rate": 7.897754109497826e-06, + "loss": 2.4671, + "step": 923 + }, + { + "epoch": 1.2505077860528098, + "grad_norm": 1.0832508824791298, + "learning_rate": 7.89335648089903e-06, + "loss": 2.3355, + "step": 924 + }, + { + "epoch": 1.2518618821936358, + "grad_norm": 1.156819119690463, + "learning_rate": 7.888955484712592e-06, + "loss": 2.6164, + "step": 925 + }, + { + "epoch": 1.2532159783344619, + "grad_norm": 1.0295051950872909, + "learning_rate": 7.88455112606085e-06, + "loss": 1.9817, + "step": 926 + }, + { + "epoch": 1.2545700744752877, + "grad_norm": 0.9867097354242217, + "learning_rate": 7.880143410070051e-06, + "loss": 1.9863, + "step": 927 + }, + { + "epoch": 1.2559241706161137, + "grad_norm": 1.1107662231325113, + "learning_rate": 7.875732341870349e-06, + "loss": 2.0024, + "step": 928 + }, + { + "epoch": 1.2572782667569398, + "grad_norm": 0.9832002800266002, + "learning_rate": 7.871317926595804e-06, + "loss": 2.1088, + "step": 929 + }, + { + "epoch": 1.2586323628977658, + "grad_norm": 0.9422090734118035, + "learning_rate": 7.866900169384368e-06, + "loss": 2.4123, + "step": 930 + }, + { + "epoch": 1.2599864590385916, + "grad_norm": 1.0997264185351083, + "learning_rate": 7.862479075377884e-06, + "loss": 2.2158, + "step": 931 + }, 
+ { + "epoch": 1.2613405551794177, + "grad_norm": 1.1188615261512294, + "learning_rate": 7.85805464972208e-06, + "loss": 2.3005, + "step": 932 + }, + { + "epoch": 1.2626946513202437, + "grad_norm": 1.0299930093784284, + "learning_rate": 7.85362689756656e-06, + "loss": 2.2651, + "step": 933 + }, + { + "epoch": 1.2640487474610698, + "grad_norm": 0.9937695521744077, + "learning_rate": 7.849195824064797e-06, + "loss": 2.2419, + "step": 934 + }, + { + "epoch": 1.2654028436018958, + "grad_norm": 1.0175704885653492, + "learning_rate": 7.844761434374138e-06, + "loss": 2.2974, + "step": 935 + }, + { + "epoch": 1.2667569397427219, + "grad_norm": 1.1723623677027686, + "learning_rate": 7.84032373365578e-06, + "loss": 2.0875, + "step": 936 + }, + { + "epoch": 1.2681110358835477, + "grad_norm": 1.2528863590890928, + "learning_rate": 7.835882727074779e-06, + "loss": 2.1546, + "step": 937 + }, + { + "epoch": 1.2694651320243737, + "grad_norm": 1.3110354157785997, + "learning_rate": 7.831438419800042e-06, + "loss": 2.1202, + "step": 938 + }, + { + "epoch": 1.2708192281651998, + "grad_norm": 1.0304392874864414, + "learning_rate": 7.826990817004307e-06, + "loss": 2.1668, + "step": 939 + }, + { + "epoch": 1.2721733243060258, + "grad_norm": 1.0116042176431888, + "learning_rate": 7.82253992386416e-06, + "loss": 2.2464, + "step": 940 + }, + { + "epoch": 1.2735274204468516, + "grad_norm": 0.9962700643735357, + "learning_rate": 7.818085745560009e-06, + "loss": 2.2486, + "step": 941 + }, + { + "epoch": 1.2748815165876777, + "grad_norm": 1.7303323063471459, + "learning_rate": 7.813628287276087e-06, + "loss": 2.2405, + "step": 942 + }, + { + "epoch": 1.2762356127285037, + "grad_norm": 1.0526531855129573, + "learning_rate": 7.809167554200446e-06, + "loss": 2.4385, + "step": 943 + }, + { + "epoch": 1.2775897088693298, + "grad_norm": 0.9488843491039597, + "learning_rate": 7.804703551524948e-06, + "loss": 2.0235, + "step": 944 + }, + { + "epoch": 1.2789438050101558, + "grad_norm": 1.0271145865465217, + "learning_rate": 7.800236284445262e-06, + "loss": 1.9179, + "step": 945 + }, + { + "epoch": 1.2802979011509819, + "grad_norm": 1.0978694506737823, + "learning_rate": 7.795765758160855e-06, + "loss": 2.6674, + "step": 946 + }, + { + "epoch": 1.2816519972918077, + "grad_norm": 0.9144971550877312, + "learning_rate": 7.791291977874989e-06, + "loss": 2.1267, + "step": 947 + }, + { + "epoch": 1.2830060934326337, + "grad_norm": 1.0316859906377736, + "learning_rate": 7.78681494879471e-06, + "loss": 2.2941, + "step": 948 + }, + { + "epoch": 1.2843601895734598, + "grad_norm": 1.2313895009261298, + "learning_rate": 7.78233467613085e-06, + "loss": 2.1893, + "step": 949 + }, + { + "epoch": 1.2857142857142856, + "grad_norm": 1.158158610918687, + "learning_rate": 7.777851165098012e-06, + "loss": 1.8157, + "step": 950 + }, + { + "epoch": 1.2870683818551116, + "grad_norm": 1.0349810102864025, + "learning_rate": 7.773364420914572e-06, + "loss": 2.4442, + "step": 951 + }, + { + "epoch": 1.2884224779959377, + "grad_norm": 1.1215212355798856, + "learning_rate": 7.768874448802665e-06, + "loss": 2.1174, + "step": 952 + }, + { + "epoch": 1.2897765741367637, + "grad_norm": 0.9965099934668875, + "learning_rate": 7.764381253988189e-06, + "loss": 2.5512, + "step": 953 + }, + { + "epoch": 1.2911306702775898, + "grad_norm": 1.104679475403435, + "learning_rate": 7.759884841700785e-06, + "loss": 2.0502, + "step": 954 + }, + { + "epoch": 1.2924847664184158, + "grad_norm": 1.660982051919665, + "learning_rate": 7.755385217173845e-06, + "loss": 2.4249, + 
"step": 955 + }, + { + "epoch": 1.2938388625592416, + "grad_norm": 1.0755785420417305, + "learning_rate": 7.750882385644495e-06, + "loss": 2.3396, + "step": 956 + }, + { + "epoch": 1.2951929587000677, + "grad_norm": 1.0925433109915934, + "learning_rate": 7.746376352353599e-06, + "loss": 2.249, + "step": 957 + }, + { + "epoch": 1.2965470548408937, + "grad_norm": 0.9542648024212192, + "learning_rate": 7.741867122545746e-06, + "loss": 2.2574, + "step": 958 + }, + { + "epoch": 1.2979011509817198, + "grad_norm": 1.1252306171349105, + "learning_rate": 7.73735470146924e-06, + "loss": 2.1923, + "step": 959 + }, + { + "epoch": 1.2992552471225456, + "grad_norm": 1.0754233683086076, + "learning_rate": 7.732839094376106e-06, + "loss": 2.0116, + "step": 960 + }, + { + "epoch": 1.3006093432633716, + "grad_norm": 1.1306744896780743, + "learning_rate": 7.728320306522074e-06, + "loss": 2.3124, + "step": 961 + }, + { + "epoch": 1.3019634394041977, + "grad_norm": 1.072223164700719, + "learning_rate": 7.723798343166578e-06, + "loss": 2.0477, + "step": 962 + }, + { + "epoch": 1.3033175355450237, + "grad_norm": 1.107055290773056, + "learning_rate": 7.719273209572745e-06, + "loss": 2.029, + "step": 963 + }, + { + "epoch": 1.3046716316858498, + "grad_norm": 1.0396566849531226, + "learning_rate": 7.714744911007395e-06, + "loss": 2.1424, + "step": 964 + }, + { + "epoch": 1.3060257278266758, + "grad_norm": 1.1282128940500473, + "learning_rate": 7.710213452741028e-06, + "loss": 2.3644, + "step": 965 + }, + { + "epoch": 1.3073798239675016, + "grad_norm": 1.0660837047209504, + "learning_rate": 7.70567884004783e-06, + "loss": 2.3568, + "step": 966 + }, + { + "epoch": 1.3087339201083277, + "grad_norm": 1.0098446863045805, + "learning_rate": 7.701141078205648e-06, + "loss": 2.2343, + "step": 967 + }, + { + "epoch": 1.3100880162491537, + "grad_norm": 9.794773052164214, + "learning_rate": 7.696600172495997e-06, + "loss": 2.7452, + "step": 968 + }, + { + "epoch": 1.3114421123899798, + "grad_norm": 1.3318037186474474, + "learning_rate": 7.692056128204056e-06, + "loss": 1.8471, + "step": 969 + }, + { + "epoch": 1.3127962085308056, + "grad_norm": 1.151203349471892, + "learning_rate": 7.687508950618654e-06, + "loss": 2.6144, + "step": 970 + }, + { + "epoch": 1.3141503046716316, + "grad_norm": 1.0959293878977527, + "learning_rate": 7.682958645032265e-06, + "loss": 2.22, + "step": 971 + }, + { + "epoch": 1.3155044008124577, + "grad_norm": 1.0050631329121955, + "learning_rate": 7.678405216741008e-06, + "loss": 2.4293, + "step": 972 + }, + { + "epoch": 1.3168584969532837, + "grad_norm": 1.0613207475015503, + "learning_rate": 7.67384867104463e-06, + "loss": 1.9871, + "step": 973 + }, + { + "epoch": 1.3182125930941098, + "grad_norm": 1.1967438926432024, + "learning_rate": 7.669289013246512e-06, + "loss": 2.085, + "step": 974 + }, + { + "epoch": 1.3195666892349358, + "grad_norm": 1.2081442687520356, + "learning_rate": 7.664726248653658e-06, + "loss": 2.1026, + "step": 975 + }, + { + "epoch": 1.3209207853757616, + "grad_norm": 1.117288478243434, + "learning_rate": 7.660160382576683e-06, + "loss": 2.274, + "step": 976 + }, + { + "epoch": 1.3222748815165877, + "grad_norm": 1.146272553819504, + "learning_rate": 7.655591420329816e-06, + "loss": 2.3935, + "step": 977 + }, + { + "epoch": 1.3236289776574137, + "grad_norm": 1.658633099303849, + "learning_rate": 7.651019367230886e-06, + "loss": 2.0573, + "step": 978 + }, + { + "epoch": 1.3249830737982395, + "grad_norm": 1.4532457972038215, + "learning_rate": 7.646444228601321e-06, + "loss": 
2.0818, + "step": 979 + }, + { + "epoch": 1.3263371699390656, + "grad_norm": 0.9729335641505342, + "learning_rate": 7.641866009766145e-06, + "loss": 2.1297, + "step": 980 + }, + { + "epoch": 1.3276912660798916, + "grad_norm": 1.0010902233959411, + "learning_rate": 7.637284716053959e-06, + "loss": 1.9135, + "step": 981 + }, + { + "epoch": 1.3290453622207177, + "grad_norm": 1.1644542386857344, + "learning_rate": 7.63270035279695e-06, + "loss": 2.0995, + "step": 982 + }, + { + "epoch": 1.3303994583615437, + "grad_norm": 1.17699731802077, + "learning_rate": 7.628112925330867e-06, + "loss": 2.3395, + "step": 983 + }, + { + "epoch": 1.3317535545023698, + "grad_norm": 1.9239341747345802, + "learning_rate": 7.62352243899504e-06, + "loss": 2.2552, + "step": 984 + }, + { + "epoch": 1.3331076506431956, + "grad_norm": 1.104296574087454, + "learning_rate": 7.6189288991323505e-06, + "loss": 2.12, + "step": 985 + }, + { + "epoch": 1.3344617467840216, + "grad_norm": 1.1563231154836133, + "learning_rate": 7.614332311089234e-06, + "loss": 2.4421, + "step": 986 + }, + { + "epoch": 1.3358158429248477, + "grad_norm": 1.1224897324302643, + "learning_rate": 7.609732680215676e-06, + "loss": 2.5472, + "step": 987 + }, + { + "epoch": 1.3371699390656737, + "grad_norm": 1.0798698576819643, + "learning_rate": 7.605130011865201e-06, + "loss": 1.9996, + "step": 988 + }, + { + "epoch": 1.3385240352064995, + "grad_norm": 1.1427247534168647, + "learning_rate": 7.600524311394873e-06, + "loss": 2.4247, + "step": 989 + }, + { + "epoch": 1.3398781313473256, + "grad_norm": 1.1436870392125549, + "learning_rate": 7.595915584165283e-06, + "loss": 2.2447, + "step": 990 + }, + { + "epoch": 1.3412322274881516, + "grad_norm": 1.0123472207553952, + "learning_rate": 7.591303835540544e-06, + "loss": 2.5301, + "step": 991 + }, + { + "epoch": 1.3425863236289777, + "grad_norm": 1.2308969840782176, + "learning_rate": 7.586689070888284e-06, + "loss": 2.4497, + "step": 992 + }, + { + "epoch": 1.3439404197698037, + "grad_norm": 1.2549119339933823, + "learning_rate": 7.582071295579647e-06, + "loss": 2.4945, + "step": 993 + }, + { + "epoch": 1.3452945159106298, + "grad_norm": 1.1551978030637418, + "learning_rate": 7.577450514989275e-06, + "loss": 1.7533, + "step": 994 + }, + { + "epoch": 1.3466486120514556, + "grad_norm": 0.8995145500568735, + "learning_rate": 7.572826734495313e-06, + "loss": 2.0904, + "step": 995 + }, + { + "epoch": 1.3480027081922816, + "grad_norm": 1.0456560935840085, + "learning_rate": 7.568199959479393e-06, + "loss": 2.3237, + "step": 996 + }, + { + "epoch": 1.3493568043331077, + "grad_norm": 1.1047521602796329, + "learning_rate": 7.563570195326635e-06, + "loss": 2.2112, + "step": 997 + }, + { + "epoch": 1.3507109004739337, + "grad_norm": 1.0312134506506243, + "learning_rate": 7.558937447425638e-06, + "loss": 1.9692, + "step": 998 + }, + { + "epoch": 1.3520649966147595, + "grad_norm": 1.114956621453039, + "learning_rate": 7.5543017211684745e-06, + "loss": 2.5477, + "step": 999 + }, + { + "epoch": 1.3534190927555856, + "grad_norm": 1.0810952077309708, + "learning_rate": 7.5496630219506805e-06, + "loss": 2.5975, + "step": 1000 + }, + { + "epoch": 1.3547731888964116, + "grad_norm": 1.0751552768256476, + "learning_rate": 7.545021355171253e-06, + "loss": 2.4863, + "step": 1001 + }, + { + "epoch": 1.3561272850372377, + "grad_norm": 1.1285166971489493, + "learning_rate": 7.540376726232648e-06, + "loss": 2.1198, + "step": 1002 + }, + { + "epoch": 1.3574813811780637, + "grad_norm": 1.3021965465577348, + "learning_rate": 
7.535729140540762e-06, + "loss": 2.5985, + "step": 1003 + }, + { + "epoch": 1.3588354773188898, + "grad_norm": 1.1282925324454272, + "learning_rate": 7.531078603504938e-06, + "loss": 2.4989, + "step": 1004 + }, + { + "epoch": 1.3601895734597156, + "grad_norm": 1.1221756439884651, + "learning_rate": 7.52642512053795e-06, + "loss": 2.2481, + "step": 1005 + }, + { + "epoch": 1.3615436696005416, + "grad_norm": 1.057396172107965, + "learning_rate": 7.521768697056004e-06, + "loss": 2.181, + "step": 1006 + }, + { + "epoch": 1.3628977657413677, + "grad_norm": 1.0186231893170619, + "learning_rate": 7.517109338478729e-06, + "loss": 2.1148, + "step": 1007 + }, + { + "epoch": 1.3642518618821937, + "grad_norm": 0.996322134726628, + "learning_rate": 7.512447050229166e-06, + "loss": 2.0567, + "step": 1008 + }, + { + "epoch": 1.3656059580230195, + "grad_norm": 1.0628848521017902, + "learning_rate": 7.507781837733771e-06, + "loss": 2.0321, + "step": 1009 + }, + { + "epoch": 1.3669600541638456, + "grad_norm": 1.2154305322289178, + "learning_rate": 7.503113706422398e-06, + "loss": 2.6109, + "step": 1010 + }, + { + "epoch": 1.3683141503046716, + "grad_norm": 1.2116773535315877, + "learning_rate": 7.498442661728305e-06, + "loss": 2.2339, + "step": 1011 + }, + { + "epoch": 1.3696682464454977, + "grad_norm": 1.0098078280775096, + "learning_rate": 7.493768709088133e-06, + "loss": 2.9048, + "step": 1012 + }, + { + "epoch": 1.3710223425863237, + "grad_norm": 0.9136677144647644, + "learning_rate": 7.489091853941914e-06, + "loss": 1.5463, + "step": 1013 + }, + { + "epoch": 1.3723764387271498, + "grad_norm": 1.075788414941271, + "learning_rate": 7.4844121017330575e-06, + "loss": 2.0282, + "step": 1014 + }, + { + "epoch": 1.3737305348679756, + "grad_norm": 0.9532034628399626, + "learning_rate": 7.4797294579083405e-06, + "loss": 2.0474, + "step": 1015 + }, + { + "epoch": 1.3750846310088016, + "grad_norm": 1.0957243116565307, + "learning_rate": 7.475043927917908e-06, + "loss": 2.1565, + "step": 1016 + }, + { + "epoch": 1.3764387271496277, + "grad_norm": 1.0904772853908138, + "learning_rate": 7.470355517215267e-06, + "loss": 2.3966, + "step": 1017 + }, + { + "epoch": 1.3777928232904535, + "grad_norm": 1.3101240056799253, + "learning_rate": 7.4656642312572734e-06, + "loss": 2.1686, + "step": 1018 + }, + { + "epoch": 1.3791469194312795, + "grad_norm": 1.7456227009220926, + "learning_rate": 7.460970075504132e-06, + "loss": 2.2421, + "step": 1019 + }, + { + "epoch": 1.3805010155721056, + "grad_norm": 1.1602149134117044, + "learning_rate": 7.4562730554193875e-06, + "loss": 2.5002, + "step": 1020 + }, + { + "epoch": 1.3818551117129316, + "grad_norm": 1.292010568667381, + "learning_rate": 7.451573176469916e-06, + "loss": 2.4289, + "step": 1021 + }, + { + "epoch": 1.3832092078537577, + "grad_norm": 1.135185428169095, + "learning_rate": 7.446870444125926e-06, + "loss": 2.5667, + "step": 1022 + }, + { + "epoch": 1.3845633039945837, + "grad_norm": 1.0225658545428182, + "learning_rate": 7.442164863860939e-06, + "loss": 2.0494, + "step": 1023 + }, + { + "epoch": 1.3859174001354095, + "grad_norm": 1.057697531883615, + "learning_rate": 7.4374564411518e-06, + "loss": 1.9321, + "step": 1024 + }, + { + "epoch": 1.3872714962762356, + "grad_norm": 0.9672332524393876, + "learning_rate": 7.432745181478659e-06, + "loss": 2.3441, + "step": 1025 + }, + { + "epoch": 1.3886255924170616, + "grad_norm": 1.1227113972766776, + "learning_rate": 7.428031090324966e-06, + "loss": 3.3247, + "step": 1026 + }, + { + "epoch": 1.3899796885578877, + "grad_norm": 
1.158636683545892, + "learning_rate": 7.423314173177467e-06, + "loss": 2.4228, + "step": 1027 + }, + { + "epoch": 1.3913337846987135, + "grad_norm": 1.050811409742003, + "learning_rate": 7.4185944355261996e-06, + "loss": 2.2984, + "step": 1028 + }, + { + "epoch": 1.3926878808395395, + "grad_norm": 1.130180934126097, + "learning_rate": 7.413871882864483e-06, + "loss": 2.0937, + "step": 1029 + }, + { + "epoch": 1.3940419769803656, + "grad_norm": 1.1323994964140216, + "learning_rate": 7.4091465206889115e-06, + "loss": 1.8611, + "step": 1030 + }, + { + "epoch": 1.3953960731211916, + "grad_norm": 1.1272097248462323, + "learning_rate": 7.404418354499352e-06, + "loss": 2.3052, + "step": 1031 + }, + { + "epoch": 1.3967501692620177, + "grad_norm": 1.0357729628233883, + "learning_rate": 7.399687389798933e-06, + "loss": 1.8565, + "step": 1032 + }, + { + "epoch": 1.3981042654028437, + "grad_norm": 1.0211169637817514, + "learning_rate": 7.39495363209404e-06, + "loss": 2.1472, + "step": 1033 + }, + { + "epoch": 1.3994583615436695, + "grad_norm": 1.0575088136735646, + "learning_rate": 7.390217086894309e-06, + "loss": 1.925, + "step": 1034 + }, + { + "epoch": 1.4008124576844956, + "grad_norm": 1.153744768232039, + "learning_rate": 7.385477759712625e-06, + "loss": 2.6485, + "step": 1035 + }, + { + "epoch": 1.4021665538253216, + "grad_norm": 1.1427109111587352, + "learning_rate": 7.380735656065103e-06, + "loss": 2.0497, + "step": 1036 + }, + { + "epoch": 1.4035206499661477, + "grad_norm": 1.1900700043389405, + "learning_rate": 7.375990781471098e-06, + "loss": 2.3127, + "step": 1037 + }, + { + "epoch": 1.4048747461069735, + "grad_norm": 1.1628543548305619, + "learning_rate": 7.371243141453185e-06, + "loss": 2.0708, + "step": 1038 + }, + { + "epoch": 1.4062288422477995, + "grad_norm": 1.1557498507007162, + "learning_rate": 7.366492741537156e-06, + "loss": 2.0788, + "step": 1039 + }, + { + "epoch": 1.4075829383886256, + "grad_norm": 1.0074214592668214, + "learning_rate": 7.361739587252019e-06, + "loss": 2.2388, + "step": 1040 + }, + { + "epoch": 1.4089370345294516, + "grad_norm": 0.9850303377628948, + "learning_rate": 7.3569836841299905e-06, + "loss": 2.3422, + "step": 1041 + }, + { + "epoch": 1.4102911306702777, + "grad_norm": 0.9362862055958489, + "learning_rate": 7.352225037706476e-06, + "loss": 1.9123, + "step": 1042 + }, + { + "epoch": 1.4116452268111037, + "grad_norm": 1.0225754131079985, + "learning_rate": 7.347463653520086e-06, + "loss": 2.1162, + "step": 1043 + }, + { + "epoch": 1.4129993229519295, + "grad_norm": 1.1046568630708087, + "learning_rate": 7.34269953711261e-06, + "loss": 2.744, + "step": 1044 + }, + { + "epoch": 1.4143534190927556, + "grad_norm": 1.0769083803347521, + "learning_rate": 7.337932694029018e-06, + "loss": 2.1198, + "step": 1045 + }, + { + "epoch": 1.4157075152335816, + "grad_norm": 0.9800626580167843, + "learning_rate": 7.3331631298174596e-06, + "loss": 2.3196, + "step": 1046 + }, + { + "epoch": 1.4170616113744074, + "grad_norm": 1.0549618215499794, + "learning_rate": 7.3283908500292434e-06, + "loss": 2.1395, + "step": 1047 + }, + { + "epoch": 1.4184157075152335, + "grad_norm": 1.060574227209818, + "learning_rate": 7.323615860218844e-06, + "loss": 2.8704, + "step": 1048 + }, + { + "epoch": 1.4197698036560595, + "grad_norm": 1.0781456745775502, + "learning_rate": 7.3188381659438865e-06, + "loss": 2.2806, + "step": 1049 + }, + { + "epoch": 1.4211238997968856, + "grad_norm": 1.551162578778134, + "learning_rate": 7.314057772765148e-06, + "loss": 2.116, + "step": 1050 + }, + { + 
"epoch": 1.4224779959377116, + "grad_norm": 1.3756005806034488, + "learning_rate": 7.309274686246547e-06, + "loss": 3.2393, + "step": 1051 + }, + { + "epoch": 1.4238320920785377, + "grad_norm": 1.0635108173294194, + "learning_rate": 7.304488911955129e-06, + "loss": 2.2153, + "step": 1052 + }, + { + "epoch": 1.4251861882193635, + "grad_norm": 1.1523212588317262, + "learning_rate": 7.299700455461076e-06, + "loss": 1.8101, + "step": 1053 + }, + { + "epoch": 1.4265402843601895, + "grad_norm": 1.1351217638799846, + "learning_rate": 7.294909322337689e-06, + "loss": 2.459, + "step": 1054 + }, + { + "epoch": 1.4278943805010156, + "grad_norm": 1.113321093812555, + "learning_rate": 7.290115518161385e-06, + "loss": 1.9065, + "step": 1055 + }, + { + "epoch": 1.4292484766418416, + "grad_norm": 1.1651609776327243, + "learning_rate": 7.28531904851169e-06, + "loss": 2.2955, + "step": 1056 + }, + { + "epoch": 1.4306025727826674, + "grad_norm": 0.9391837640644604, + "learning_rate": 7.280519918971228e-06, + "loss": 2.5399, + "step": 1057 + }, + { + "epoch": 1.4319566689234935, + "grad_norm": 1.1481164415158314, + "learning_rate": 7.275718135125726e-06, + "loss": 2.3987, + "step": 1058 + }, + { + "epoch": 1.4333107650643195, + "grad_norm": 0.9819604190905024, + "learning_rate": 7.270913702563995e-06, + "loss": 2.0484, + "step": 1059 + }, + { + "epoch": 1.4346648612051456, + "grad_norm": 1.241655388978992, + "learning_rate": 7.266106626877933e-06, + "loss": 2.1864, + "step": 1060 + }, + { + "epoch": 1.4360189573459716, + "grad_norm": 1.0531564756435357, + "learning_rate": 7.26129691366251e-06, + "loss": 2.1386, + "step": 1061 + }, + { + "epoch": 1.4373730534867977, + "grad_norm": 1.1218122300065656, + "learning_rate": 7.256484568515769e-06, + "loss": 2.4484, + "step": 1062 + }, + { + "epoch": 1.4387271496276235, + "grad_norm": 1.0001136323518225, + "learning_rate": 7.251669597038814e-06, + "loss": 2.2824, + "step": 1063 + }, + { + "epoch": 1.4400812457684495, + "grad_norm": 1.2896307069641786, + "learning_rate": 7.246852004835807e-06, + "loss": 2.4539, + "step": 1064 + }, + { + "epoch": 1.4414353419092756, + "grad_norm": 1.0510167874878298, + "learning_rate": 7.242031797513962e-06, + "loss": 2.6035, + "step": 1065 + }, + { + "epoch": 1.4427894380501016, + "grad_norm": 1.2155395455915952, + "learning_rate": 7.2372089806835335e-06, + "loss": 1.9087, + "step": 1066 + }, + { + "epoch": 1.4441435341909274, + "grad_norm": 1.1199078701755178, + "learning_rate": 7.232383559957815e-06, + "loss": 2.1759, + "step": 1067 + }, + { + "epoch": 1.4454976303317535, + "grad_norm": 1.159792012820624, + "learning_rate": 7.227555540953131e-06, + "loss": 2.4314, + "step": 1068 + }, + { + "epoch": 1.4468517264725795, + "grad_norm": 1.0730576607550595, + "learning_rate": 7.22272492928883e-06, + "loss": 2.3359, + "step": 1069 + }, + { + "epoch": 1.4482058226134056, + "grad_norm": 0.8222048650029028, + "learning_rate": 7.217891730587276e-06, + "loss": 2.2215, + "step": 1070 + }, + { + "epoch": 1.4495599187542316, + "grad_norm": 1.0285016809620628, + "learning_rate": 7.2130559504738464e-06, + "loss": 2.239, + "step": 1071 + }, + { + "epoch": 1.4509140148950577, + "grad_norm": 1.0693497499216194, + "learning_rate": 7.2082175945769226e-06, + "loss": 2.2506, + "step": 1072 + }, + { + "epoch": 1.4522681110358835, + "grad_norm": 1.1167979192807023, + "learning_rate": 7.203376668527884e-06, + "loss": 2.2712, + "step": 1073 + }, + { + "epoch": 1.4536222071767095, + "grad_norm": 1.1307443613901718, + "learning_rate": 7.198533177961102e-06, + 
"loss": 2.0017, + "step": 1074 + }, + { + "epoch": 1.4549763033175356, + "grad_norm": 1.0564264494899962, + "learning_rate": 7.19368712851393e-06, + "loss": 2.1056, + "step": 1075 + }, + { + "epoch": 1.4563303994583616, + "grad_norm": 1.1101530483846054, + "learning_rate": 7.188838525826702e-06, + "loss": 2.1903, + "step": 1076 + }, + { + "epoch": 1.4576844955991874, + "grad_norm": 1.0103673641252706, + "learning_rate": 7.183987375542726e-06, + "loss": 2.382, + "step": 1077 + }, + { + "epoch": 1.4590385917400135, + "grad_norm": 1.253353521477901, + "learning_rate": 7.17913368330827e-06, + "loss": 2.0939, + "step": 1078 + }, + { + "epoch": 1.4603926878808395, + "grad_norm": 1.2508634701536092, + "learning_rate": 7.174277454772566e-06, + "loss": 1.9324, + "step": 1079 + }, + { + "epoch": 1.4617467840216656, + "grad_norm": 8.122448301970948, + "learning_rate": 7.169418695587791e-06, + "loss": 2.1723, + "step": 1080 + }, + { + "epoch": 1.4631008801624916, + "grad_norm": 1.3165876873084112, + "learning_rate": 7.164557411409075e-06, + "loss": 2.0454, + "step": 1081 + }, + { + "epoch": 1.4644549763033177, + "grad_norm": 1.3433865812537615, + "learning_rate": 7.1596936078944825e-06, + "loss": 1.5743, + "step": 1082 + }, + { + "epoch": 1.4658090724441435, + "grad_norm": 1.1708156611040208, + "learning_rate": 7.154827290705012e-06, + "loss": 2.7672, + "step": 1083 + }, + { + "epoch": 1.4671631685849695, + "grad_norm": 1.0739066488240625, + "learning_rate": 7.1499584655045866e-06, + "loss": 2.0953, + "step": 1084 + }, + { + "epoch": 1.4685172647257956, + "grad_norm": 1.0810850204219578, + "learning_rate": 7.145087137960047e-06, + "loss": 2.6921, + "step": 1085 + }, + { + "epoch": 1.4698713608666214, + "grad_norm": 0.9939405297389377, + "learning_rate": 7.14021331374115e-06, + "loss": 2.0588, + "step": 1086 + }, + { + "epoch": 1.4712254570074474, + "grad_norm": 0.9226185898131262, + "learning_rate": 7.135336998520557e-06, + "loss": 1.9489, + "step": 1087 + }, + { + "epoch": 1.4725795531482735, + "grad_norm": 1.0783455175484244, + "learning_rate": 7.130458197973828e-06, + "loss": 1.8894, + "step": 1088 + }, + { + "epoch": 1.4739336492890995, + "grad_norm": 1.0986213365125383, + "learning_rate": 7.125576917779414e-06, + "loss": 2.3981, + "step": 1089 + }, + { + "epoch": 1.4752877454299256, + "grad_norm": 0.9907037990576693, + "learning_rate": 7.120693163618656e-06, + "loss": 1.9927, + "step": 1090 + }, + { + "epoch": 1.4766418415707516, + "grad_norm": 1.067381598945353, + "learning_rate": 7.115806941175771e-06, + "loss": 1.9368, + "step": 1091 + }, + { + "epoch": 1.4779959377115774, + "grad_norm": 1.00952047194142, + "learning_rate": 7.1109182561378505e-06, + "loss": 2.1691, + "step": 1092 + }, + { + "epoch": 1.4793500338524035, + "grad_norm": 1.107779870373698, + "learning_rate": 7.106027114194856e-06, + "loss": 1.8634, + "step": 1093 + }, + { + "epoch": 1.4807041299932295, + "grad_norm": 1.1243485392179005, + "learning_rate": 7.101133521039597e-06, + "loss": 1.9522, + "step": 1094 + }, + { + "epoch": 1.4820582261340556, + "grad_norm": 1.0572503465978025, + "learning_rate": 7.09623748236775e-06, + "loss": 1.8363, + "step": 1095 + }, + { + "epoch": 1.4834123222748814, + "grad_norm": 1.0216562069583985, + "learning_rate": 7.091339003877826e-06, + "loss": 1.8552, + "step": 1096 + }, + { + "epoch": 1.4847664184157074, + "grad_norm": 1.089956344974216, + "learning_rate": 7.086438091271186e-06, + "loss": 2.5221, + "step": 1097 + }, + { + "epoch": 1.4861205145565335, + "grad_norm": 1.11408247722042, + 
"learning_rate": 7.0815347502520185e-06, + "loss": 2.2585, + "step": 1098 + }, + { + "epoch": 1.4874746106973595, + "grad_norm": 0.9710069262269226, + "learning_rate": 7.076628986527335e-06, + "loss": 2.3226, + "step": 1099 + }, + { + "epoch": 1.4888287068381856, + "grad_norm": 1.0917833338553558, + "learning_rate": 7.0717208058069755e-06, + "loss": 1.9066, + "step": 1100 + }, + { + "epoch": 1.4901828029790116, + "grad_norm": 1.1161624453210208, + "learning_rate": 7.066810213803586e-06, + "loss": 2.1559, + "step": 1101 + }, + { + "epoch": 1.4915368991198374, + "grad_norm": 1.16178506916721, + "learning_rate": 7.061897216232622e-06, + "loss": 2.3097, + "step": 1102 + }, + { + "epoch": 1.4928909952606635, + "grad_norm": 1.0384328202163011, + "learning_rate": 7.05698181881234e-06, + "loss": 1.938, + "step": 1103 + }, + { + "epoch": 1.4942450914014895, + "grad_norm": 1.216771459951468, + "learning_rate": 7.052064027263785e-06, + "loss": 2.4431, + "step": 1104 + }, + { + "epoch": 1.4955991875423156, + "grad_norm": 1.3088163468638734, + "learning_rate": 7.047143847310794e-06, + "loss": 2.0962, + "step": 1105 + }, + { + "epoch": 1.4969532836831414, + "grad_norm": 1.1011041050321808, + "learning_rate": 7.042221284679982e-06, + "loss": 1.9349, + "step": 1106 + }, + { + "epoch": 1.4983073798239674, + "grad_norm": 1.2350163332118804, + "learning_rate": 7.037296345100734e-06, + "loss": 1.8952, + "step": 1107 + }, + { + "epoch": 1.4996614759647935, + "grad_norm": 1.0028965498141413, + "learning_rate": 7.032369034305209e-06, + "loss": 2.4324, + "step": 1108 + }, + { + "epoch": 1.5010155721056195, + "grad_norm": 1.0240481324400763, + "learning_rate": 7.027439358028315e-06, + "loss": 2.4041, + "step": 1109 + }, + { + "epoch": 1.5023696682464456, + "grad_norm": 1.2289451784867027, + "learning_rate": 7.022507322007723e-06, + "loss": 2.296, + "step": 1110 + }, + { + "epoch": 1.5037237643872716, + "grad_norm": 1.1846045097053997, + "learning_rate": 7.017572931983846e-06, + "loss": 1.9807, + "step": 1111 + }, + { + "epoch": 1.5050778605280974, + "grad_norm": 1.3679366822185766, + "learning_rate": 7.012636193699838e-06, + "loss": 2.3165, + "step": 1112 + }, + { + "epoch": 1.5064319566689235, + "grad_norm": 1.0735093643875895, + "learning_rate": 7.007697112901586e-06, + "loss": 2.0637, + "step": 1113 + }, + { + "epoch": 1.5077860528097495, + "grad_norm": 0.9805492134387425, + "learning_rate": 7.002755695337703e-06, + "loss": 2.2236, + "step": 1114 + }, + { + "epoch": 1.5091401489505754, + "grad_norm": 1.1540910897325873, + "learning_rate": 6.997811946759522e-06, + "loss": 2.5638, + "step": 1115 + }, + { + "epoch": 1.5104942450914014, + "grad_norm": 1.045564752122656, + "learning_rate": 6.9928658729210885e-06, + "loss": 2.5629, + "step": 1116 + }, + { + "epoch": 1.5118483412322274, + "grad_norm": 1.5327917333436447, + "learning_rate": 6.987917479579156e-06, + "loss": 2.3958, + "step": 1117 + }, + { + "epoch": 1.5132024373730535, + "grad_norm": 1.0061412022965626, + "learning_rate": 6.982966772493176e-06, + "loss": 2.3869, + "step": 1118 + }, + { + "epoch": 1.5145565335138795, + "grad_norm": 1.143443700093151, + "learning_rate": 6.978013757425295e-06, + "loss": 2.2255, + "step": 1119 + }, + { + "epoch": 1.5159106296547056, + "grad_norm": 1.076563769412944, + "learning_rate": 6.973058440140341e-06, + "loss": 2.0117, + "step": 1120 + }, + { + "epoch": 1.5172647257955316, + "grad_norm": 0.9932180957280397, + "learning_rate": 6.96810082640583e-06, + "loss": 2.4315, + "step": 1121 + }, + { + "epoch": 
1.5186188219363574, + "grad_norm": 1.7614630897566435, + "learning_rate": 6.963140921991941e-06, + "loss": 1.8154, + "step": 1122 + }, + { + "epoch": 1.5199729180771835, + "grad_norm": 1.0435661710662918, + "learning_rate": 6.958178732671527e-06, + "loss": 2.1161, + "step": 1123 + }, + { + "epoch": 1.5213270142180095, + "grad_norm": 1.034141937866288, + "learning_rate": 6.953214264220095e-06, + "loss": 2.2153, + "step": 1124 + }, + { + "epoch": 1.5226811103588354, + "grad_norm": 1.1333203921847073, + "learning_rate": 6.948247522415811e-06, + "loss": 2.1679, + "step": 1125 + }, + { + "epoch": 1.5240352064996614, + "grad_norm": 1.1523266319281296, + "learning_rate": 6.943278513039477e-06, + "loss": 2.0316, + "step": 1126 + }, + { + "epoch": 1.5253893026404874, + "grad_norm": 1.1175733614955354, + "learning_rate": 6.938307241874547e-06, + "loss": 1.9331, + "step": 1127 + }, + { + "epoch": 1.5267433987813135, + "grad_norm": 1.0816050768685381, + "learning_rate": 6.933333714707094e-06, + "loss": 2.1337, + "step": 1128 + }, + { + "epoch": 1.5280974949221395, + "grad_norm": 0.9555568730660431, + "learning_rate": 6.928357937325829e-06, + "loss": 2.1897, + "step": 1129 + }, + { + "epoch": 1.5294515910629656, + "grad_norm": 1.1330718777414148, + "learning_rate": 6.923379915522075e-06, + "loss": 2.5013, + "step": 1130 + }, + { + "epoch": 1.5308056872037916, + "grad_norm": 1.4860432492374922, + "learning_rate": 6.918399655089768e-06, + "loss": 2.2661, + "step": 1131 + }, + { + "epoch": 1.5321597833446174, + "grad_norm": 1.0608341510421102, + "learning_rate": 6.913417161825449e-06, + "loss": 2.6459, + "step": 1132 + }, + { + "epoch": 1.5335138794854435, + "grad_norm": 1.2535263483596926, + "learning_rate": 6.908432441528262e-06, + "loss": 2.5183, + "step": 1133 + }, + { + "epoch": 1.5348679756262693, + "grad_norm": 1.1881324730918719, + "learning_rate": 6.903445499999939e-06, + "loss": 1.9834, + "step": 1134 + }, + { + "epoch": 1.5362220717670954, + "grad_norm": 0.9936992242467968, + "learning_rate": 6.8984563430448e-06, + "loss": 2.185, + "step": 1135 + }, + { + "epoch": 1.5375761679079214, + "grad_norm": 0.9856859764312084, + "learning_rate": 6.893464976469739e-06, + "loss": 2.413, + "step": 1136 + }, + { + "epoch": 1.5389302640487474, + "grad_norm": 0.9728057769285352, + "learning_rate": 6.888471406084227e-06, + "loss": 2.2291, + "step": 1137 + }, + { + "epoch": 1.5402843601895735, + "grad_norm": 1.9592487450519758, + "learning_rate": 6.883475637700298e-06, + "loss": 2.2332, + "step": 1138 + }, + { + "epoch": 1.5416384563303995, + "grad_norm": 2.989310700259001, + "learning_rate": 6.8784776771325426e-06, + "loss": 2.5173, + "step": 1139 + }, + { + "epoch": 1.5429925524712256, + "grad_norm": 1.0753052749288403, + "learning_rate": 6.873477530198107e-06, + "loss": 2.2734, + "step": 1140 + }, + { + "epoch": 1.5443466486120516, + "grad_norm": 1.1348199790348215, + "learning_rate": 6.868475202716677e-06, + "loss": 3.0435, + "step": 1141 + }, + { + "epoch": 1.5457007447528774, + "grad_norm": 0.9253248443470898, + "learning_rate": 6.863470700510479e-06, + "loss": 2.2282, + "step": 1142 + }, + { + "epoch": 1.5470548408937035, + "grad_norm": 1.0212577294991076, + "learning_rate": 6.858464029404272e-06, + "loss": 1.9986, + "step": 1143 + }, + { + "epoch": 1.5484089370345293, + "grad_norm": 1.0119821954044654, + "learning_rate": 6.8534551952253395e-06, + "loss": 2.236, + "step": 1144 + }, + { + "epoch": 1.5497630331753554, + "grad_norm": 0.9376658731281714, + "learning_rate": 6.848444203803476e-06, + "loss": 
1.9798, + "step": 1145 + }, + { + "epoch": 1.5511171293161814, + "grad_norm": 1.0518223950174321, + "learning_rate": 6.843431060970995e-06, + "loss": 2.2833, + "step": 1146 + }, + { + "epoch": 1.5524712254570074, + "grad_norm": 1.6688430719575653, + "learning_rate": 6.838415772562711e-06, + "loss": 2.2911, + "step": 1147 + }, + { + "epoch": 1.5538253215978335, + "grad_norm": 2.211386740791148, + "learning_rate": 6.833398344415933e-06, + "loss": 2.6092, + "step": 1148 + }, + { + "epoch": 1.5551794177386595, + "grad_norm": 1.302197054959585, + "learning_rate": 6.8283787823704685e-06, + "loss": 2.1764, + "step": 1149 + }, + { + "epoch": 1.5565335138794856, + "grad_norm": 1.365025659237606, + "learning_rate": 6.823357092268596e-06, + "loss": 1.9187, + "step": 1150 + }, + { + "epoch": 1.5578876100203114, + "grad_norm": 0.9995629551418774, + "learning_rate": 6.8183332799550836e-06, + "loss": 2.0254, + "step": 1151 + }, + { + "epoch": 1.5592417061611374, + "grad_norm": 1.091795853304648, + "learning_rate": 6.813307351277161e-06, + "loss": 2.5035, + "step": 1152 + }, + { + "epoch": 1.5605958023019635, + "grad_norm": 0.9113959325465628, + "learning_rate": 6.808279312084525e-06, + "loss": 2.2859, + "step": 1153 + }, + { + "epoch": 1.5619498984427893, + "grad_norm": 1.105743242638601, + "learning_rate": 6.803249168229329e-06, + "loss": 1.9762, + "step": 1154 + }, + { + "epoch": 1.5633039945836154, + "grad_norm": 1.3089749040803054, + "learning_rate": 6.798216925566171e-06, + "loss": 2.245, + "step": 1155 + }, + { + "epoch": 1.5646580907244414, + "grad_norm": 1.4155040937471588, + "learning_rate": 6.7931825899521e-06, + "loss": 2.151, + "step": 1156 + }, + { + "epoch": 1.5660121868652674, + "grad_norm": 1.2355148875524264, + "learning_rate": 6.788146167246594e-06, + "loss": 2.2848, + "step": 1157 + }, + { + "epoch": 1.5673662830060935, + "grad_norm": 1.134385408399196, + "learning_rate": 6.783107663311566e-06, + "loss": 1.9886, + "step": 1158 + }, + { + "epoch": 1.5687203791469195, + "grad_norm": 1.1445134576197042, + "learning_rate": 6.7780670840113434e-06, + "loss": 2.2995, + "step": 1159 + }, + { + "epoch": 1.5700744752877456, + "grad_norm": 1.1766480093443303, + "learning_rate": 6.773024435212678e-06, + "loss": 2.2219, + "step": 1160 + }, + { + "epoch": 1.5714285714285714, + "grad_norm": 1.5615268999323522, + "learning_rate": 6.767979722784725e-06, + "loss": 2.0472, + "step": 1161 + }, + { + "epoch": 1.5727826675693974, + "grad_norm": 1.1706173436180667, + "learning_rate": 6.762932952599043e-06, + "loss": 2.4683, + "step": 1162 + }, + { + "epoch": 1.5741367637102233, + "grad_norm": 1.1480375058434151, + "learning_rate": 6.757884130529584e-06, + "loss": 1.9804, + "step": 1163 + }, + { + "epoch": 1.5754908598510493, + "grad_norm": 1.1079469797078965, + "learning_rate": 6.75283326245269e-06, + "loss": 2.3322, + "step": 1164 + }, + { + "epoch": 1.5768449559918754, + "grad_norm": 1.1362279207408492, + "learning_rate": 6.7477803542470834e-06, + "loss": 1.998, + "step": 1165 + }, + { + "epoch": 1.5781990521327014, + "grad_norm": 0.982450102859919, + "learning_rate": 6.742725411793862e-06, + "loss": 2.3529, + "step": 1166 + }, + { + "epoch": 1.5795531482735274, + "grad_norm": 1.1119736244356198, + "learning_rate": 6.737668440976494e-06, + "loss": 2.0098, + "step": 1167 + }, + { + "epoch": 1.5809072444143535, + "grad_norm": 1.1735566109179396, + "learning_rate": 6.7326094476808e-06, + "loss": 2.4483, + "step": 1168 + }, + { + "epoch": 1.5822613405551795, + "grad_norm": 0.9600432883498903, + 
"learning_rate": 6.727548437794963e-06, + "loss": 2.4039, + "step": 1169 + }, + { + "epoch": 1.5836154366960056, + "grad_norm": 0.9713832736002586, + "learning_rate": 6.722485417209509e-06, + "loss": 2.129, + "step": 1170 + }, + { + "epoch": 1.5849695328368314, + "grad_norm": 1.536541973608301, + "learning_rate": 6.717420391817306e-06, + "loss": 2.0486, + "step": 1171 + }, + { + "epoch": 1.5863236289776574, + "grad_norm": 1.1215408259768265, + "learning_rate": 6.712353367513555e-06, + "loss": 1.8101, + "step": 1172 + }, + { + "epoch": 1.5876777251184833, + "grad_norm": 0.9400933392391618, + "learning_rate": 6.707284350195779e-06, + "loss": 2.129, + "step": 1173 + }, + { + "epoch": 1.5890318212593093, + "grad_norm": 1.0955863112973159, + "learning_rate": 6.70221334576383e-06, + "loss": 1.8608, + "step": 1174 + }, + { + "epoch": 1.5903859174001354, + "grad_norm": 1.290340133465167, + "learning_rate": 6.697140360119867e-06, + "loss": 2.6089, + "step": 1175 + }, + { + "epoch": 1.5917400135409614, + "grad_norm": 1.0935001366917838, + "learning_rate": 6.692065399168352e-06, + "loss": 2.3835, + "step": 1176 + }, + { + "epoch": 1.5930941096817874, + "grad_norm": 1.1310710971297633, + "learning_rate": 6.686988468816055e-06, + "loss": 2.3186, + "step": 1177 + }, + { + "epoch": 1.5944482058226135, + "grad_norm": 1.1106261511672113, + "learning_rate": 6.681909574972028e-06, + "loss": 2.278, + "step": 1178 + }, + { + "epoch": 1.5958023019634395, + "grad_norm": 1.1099983154470332, + "learning_rate": 6.676828723547614e-06, + "loss": 1.8412, + "step": 1179 + }, + { + "epoch": 1.5971563981042654, + "grad_norm": 1.115352426872196, + "learning_rate": 6.671745920456434e-06, + "loss": 2.6848, + "step": 1180 + }, + { + "epoch": 1.5985104942450914, + "grad_norm": 1.9617683740846232, + "learning_rate": 6.666661171614382e-06, + "loss": 2.4141, + "step": 1181 + }, + { + "epoch": 1.5998645903859174, + "grad_norm": 1.0340361767560753, + "learning_rate": 6.661574482939615e-06, + "loss": 2.029, + "step": 1182 + }, + { + "epoch": 1.6012186865267433, + "grad_norm": 1.1104672220605092, + "learning_rate": 6.656485860352544e-06, + "loss": 2.3655, + "step": 1183 + }, + { + "epoch": 1.6025727826675693, + "grad_norm": 1.2030765915104757, + "learning_rate": 6.651395309775837e-06, + "loss": 2.1954, + "step": 1184 + }, + { + "epoch": 1.6039268788083954, + "grad_norm": 1.2573381048562495, + "learning_rate": 6.6463028371344015e-06, + "loss": 2.5574, + "step": 1185 + }, + { + "epoch": 1.6052809749492214, + "grad_norm": 1.1722502953655352, + "learning_rate": 6.641208448355388e-06, + "loss": 1.9912, + "step": 1186 + }, + { + "epoch": 1.6066350710900474, + "grad_norm": 0.9237524987059714, + "learning_rate": 6.63611214936817e-06, + "loss": 1.7042, + "step": 1187 + }, + { + "epoch": 1.6079891672308735, + "grad_norm": 1.0114862987523483, + "learning_rate": 6.631013946104348e-06, + "loss": 2.1457, + "step": 1188 + }, + { + "epoch": 1.6093432633716995, + "grad_norm": 1.2504620286352166, + "learning_rate": 6.625913844497739e-06, + "loss": 2.9242, + "step": 1189 + }, + { + "epoch": 1.6106973595125254, + "grad_norm": 1.0716280700456458, + "learning_rate": 6.620811850484368e-06, + "loss": 2.1747, + "step": 1190 + }, + { + "epoch": 1.6120514556533514, + "grad_norm": 1.0218159941143157, + "learning_rate": 6.6157079700024665e-06, + "loss": 2.1052, + "step": 1191 + }, + { + "epoch": 1.6134055517941774, + "grad_norm": 0.9964735852419978, + "learning_rate": 6.6106022089924535e-06, + "loss": 2.1085, + "step": 1192 + }, + { + "epoch": 
1.6147596479350033, + "grad_norm": 1.1164408166829398, + "learning_rate": 6.605494573396947e-06, + "loss": 2.146, + "step": 1193 + }, + { + "epoch": 1.6161137440758293, + "grad_norm": 1.1424753368875373, + "learning_rate": 6.600385069160739e-06, + "loss": 2.8387, + "step": 1194 + }, + { + "epoch": 1.6174678402166554, + "grad_norm": 1.3178474061043657, + "learning_rate": 6.5952737022308e-06, + "loss": 1.902, + "step": 1195 + }, + { + "epoch": 1.6188219363574814, + "grad_norm": 1.10966169321405, + "learning_rate": 6.590160478556269e-06, + "loss": 2.4412, + "step": 1196 + }, + { + "epoch": 1.6201760324983074, + "grad_norm": 1.1587333892126894, + "learning_rate": 6.585045404088442e-06, + "loss": 2.2305, + "step": 1197 + }, + { + "epoch": 1.6215301286391335, + "grad_norm": 1.0092191905987942, + "learning_rate": 6.579928484780773e-06, + "loss": 2.1454, + "step": 1198 + }, + { + "epoch": 1.6228842247799595, + "grad_norm": 1.0732773418214976, + "learning_rate": 6.5748097265888624e-06, + "loss": 2.3336, + "step": 1199 + }, + { + "epoch": 1.6242383209207854, + "grad_norm": 1.174781988020797, + "learning_rate": 6.569689135470451e-06, + "loss": 2.6344, + "step": 1200 + }, + { + "epoch": 1.6255924170616114, + "grad_norm": 1.1510963746609166, + "learning_rate": 6.564566717385412e-06, + "loss": 2.2199, + "step": 1201 + }, + { + "epoch": 1.6269465132024372, + "grad_norm": 0.8890344864420157, + "learning_rate": 6.559442478295745e-06, + "loss": 1.9674, + "step": 1202 + }, + { + "epoch": 1.6283006093432633, + "grad_norm": 1.0562702319805766, + "learning_rate": 6.5543164241655724e-06, + "loss": 2.0231, + "step": 1203 + }, + { + "epoch": 1.6296547054840893, + "grad_norm": 1.1686330096691793, + "learning_rate": 6.549188560961124e-06, + "loss": 2.401, + "step": 1204 + }, + { + "epoch": 1.6310088016249153, + "grad_norm": 0.9835702255574785, + "learning_rate": 6.544058894650736e-06, + "loss": 1.9566, + "step": 1205 + }, + { + "epoch": 1.6323628977657414, + "grad_norm": 1.1799470524015878, + "learning_rate": 6.5389274312048476e-06, + "loss": 2.3821, + "step": 1206 + }, + { + "epoch": 1.6337169939065674, + "grad_norm": 1.0305646424878727, + "learning_rate": 6.533794176595987e-06, + "loss": 1.7532, + "step": 1207 + }, + { + "epoch": 1.6350710900473935, + "grad_norm": 0.9411916034993967, + "learning_rate": 6.5286591367987655e-06, + "loss": 2.1668, + "step": 1208 + }, + { + "epoch": 1.6364251861882195, + "grad_norm": 1.1619947231820393, + "learning_rate": 6.523522317789874e-06, + "loss": 2.0694, + "step": 1209 + }, + { + "epoch": 1.6377792823290453, + "grad_norm": 1.1054945884631686, + "learning_rate": 6.518383725548074e-06, + "loss": 2.1855, + "step": 1210 + }, + { + "epoch": 1.6391333784698714, + "grad_norm": 1.0751426497234493, + "learning_rate": 6.513243366054191e-06, + "loss": 2.2038, + "step": 1211 + }, + { + "epoch": 1.6404874746106972, + "grad_norm": 1.0718361928319553, + "learning_rate": 6.508101245291109e-06, + "loss": 2.2788, + "step": 1212 + }, + { + "epoch": 1.6418415707515233, + "grad_norm": 1.1446897139146628, + "learning_rate": 6.502957369243757e-06, + "loss": 1.8886, + "step": 1213 + }, + { + "epoch": 1.6431956668923493, + "grad_norm": 1.0780227955508757, + "learning_rate": 6.497811743899112e-06, + "loss": 2.9019, + "step": 1214 + }, + { + "epoch": 1.6445497630331753, + "grad_norm": 1.1093588985005516, + "learning_rate": 6.492664375246185e-06, + "loss": 2.3378, + "step": 1215 + }, + { + "epoch": 1.6459038591740014, + "grad_norm": 1.2680934914767075, + "learning_rate": 6.487515269276015e-06, + "loss": 
2.2777, + "step": 1216 + }, + { + "epoch": 1.6472579553148274, + "grad_norm": 1.0496344613229611, + "learning_rate": 6.482364431981667e-06, + "loss": 2.1846, + "step": 1217 + }, + { + "epoch": 1.6486120514556535, + "grad_norm": 0.8830408833414931, + "learning_rate": 6.4772118693582155e-06, + "loss": 2.4639, + "step": 1218 + }, + { + "epoch": 1.6499661475964793, + "grad_norm": 1.3617840820836493, + "learning_rate": 6.472057587402748e-06, + "loss": 2.1004, + "step": 1219 + }, + { + "epoch": 1.6513202437373053, + "grad_norm": 1.0634770023039701, + "learning_rate": 6.46690159211435e-06, + "loss": 2.3582, + "step": 1220 + }, + { + "epoch": 1.6526743398781314, + "grad_norm": 1.0462993743834461, + "learning_rate": 6.461743889494103e-06, + "loss": 2.1793, + "step": 1221 + }, + { + "epoch": 1.6540284360189572, + "grad_norm": 1.12270603128975, + "learning_rate": 6.456584485545075e-06, + "loss": 2.0312, + "step": 1222 + }, + { + "epoch": 1.6553825321597833, + "grad_norm": 1.0474924934857333, + "learning_rate": 6.451423386272312e-06, + "loss": 2.1907, + "step": 1223 + }, + { + "epoch": 1.6567366283006093, + "grad_norm": 1.2139372873126406, + "learning_rate": 6.4462605976828395e-06, + "loss": 2.1359, + "step": 1224 + }, + { + "epoch": 1.6580907244414353, + "grad_norm": 1.2375074680264349, + "learning_rate": 6.441096125785641e-06, + "loss": 2.7886, + "step": 1225 + }, + { + "epoch": 1.6594448205822614, + "grad_norm": 1.2726063282105258, + "learning_rate": 6.435929976591665e-06, + "loss": 1.8611, + "step": 1226 + }, + { + "epoch": 1.6607989167230874, + "grad_norm": 1.0325742896045942, + "learning_rate": 6.4307621561138114e-06, + "loss": 2.1808, + "step": 1227 + }, + { + "epoch": 1.6621530128639135, + "grad_norm": 1.1451127258616212, + "learning_rate": 6.425592670366923e-06, + "loss": 2.3795, + "step": 1228 + }, + { + "epoch": 1.6635071090047393, + "grad_norm": 1.0588080148815049, + "learning_rate": 6.420421525367783e-06, + "loss": 2.3691, + "step": 1229 + }, + { + "epoch": 1.6648612051455653, + "grad_norm": 1.1248420963082115, + "learning_rate": 6.415248727135103e-06, + "loss": 2.2255, + "step": 1230 + }, + { + "epoch": 1.6662153012863912, + "grad_norm": 1.12193324651476, + "learning_rate": 6.410074281689522e-06, + "loss": 2.2692, + "step": 1231 + }, + { + "epoch": 1.6675693974272172, + "grad_norm": 1.1007368907096031, + "learning_rate": 6.4048981950535975e-06, + "loss": 2.0518, + "step": 1232 + }, + { + "epoch": 1.6689234935680433, + "grad_norm": 1.06269170578331, + "learning_rate": 6.3997204732517915e-06, + "loss": 2.3531, + "step": 1233 + }, + { + "epoch": 1.6702775897088693, + "grad_norm": 1.1163412698869442, + "learning_rate": 6.394541122310473e-06, + "loss": 2.2524, + "step": 1234 + }, + { + "epoch": 1.6716316858496953, + "grad_norm": 1.0554457718352965, + "learning_rate": 6.389360148257908e-06, + "loss": 2.4751, + "step": 1235 + }, + { + "epoch": 1.6729857819905214, + "grad_norm": 1.1997008894725945, + "learning_rate": 6.384177557124247e-06, + "loss": 1.9252, + "step": 1236 + }, + { + "epoch": 1.6743398781313474, + "grad_norm": 1.079055031664645, + "learning_rate": 6.378993354941529e-06, + "loss": 2.3641, + "step": 1237 + }, + { + "epoch": 1.6756939742721735, + "grad_norm": 1.1896072037787495, + "learning_rate": 6.3738075477436655e-06, + "loss": 1.9261, + "step": 1238 + }, + { + "epoch": 1.6770480704129993, + "grad_norm": 0.9256492202916144, + "learning_rate": 6.368620141566432e-06, + "loss": 2.3076, + "step": 1239 + }, + { + "epoch": 1.6784021665538253, + "grad_norm": 1.1122465104394996, + 
"learning_rate": 6.363431142447469e-06, + "loss": 2.0237, + "step": 1240 + }, + { + "epoch": 1.6797562626946512, + "grad_norm": 0.9678398341378385, + "learning_rate": 6.358240556426274e-06, + "loss": 2.4974, + "step": 1241 + }, + { + "epoch": 1.6811103588354772, + "grad_norm": 0.9925297077996342, + "learning_rate": 6.353048389544185e-06, + "loss": 2.244, + "step": 1242 + }, + { + "epoch": 1.6824644549763033, + "grad_norm": 1.0616822439065718, + "learning_rate": 6.3478546478443844e-06, + "loss": 2.3992, + "step": 1243 + }, + { + "epoch": 1.6838185511171293, + "grad_norm": 1.1733624673460672, + "learning_rate": 6.342659337371884e-06, + "loss": 1.8391, + "step": 1244 + }, + { + "epoch": 1.6851726472579553, + "grad_norm": 1.2040137843680947, + "learning_rate": 6.3374624641735275e-06, + "loss": 1.8877, + "step": 1245 + }, + { + "epoch": 1.6865267433987814, + "grad_norm": 1.0453714976813977, + "learning_rate": 6.33226403429797e-06, + "loss": 2.0552, + "step": 1246 + }, + { + "epoch": 1.6878808395396074, + "grad_norm": 1.1426720858647532, + "learning_rate": 6.327064053795684e-06, + "loss": 2.2107, + "step": 1247 + }, + { + "epoch": 1.6892349356804333, + "grad_norm": 1.199062064451871, + "learning_rate": 6.321862528718945e-06, + "loss": 2.0693, + "step": 1248 + }, + { + "epoch": 1.6905890318212593, + "grad_norm": 1.091725236727943, + "learning_rate": 6.3166594651218235e-06, + "loss": 2.1291, + "step": 1249 + }, + { + "epoch": 1.6919431279620853, + "grad_norm": 1.1275607912830679, + "learning_rate": 6.311454869060188e-06, + "loss": 2.2288, + "step": 1250 + }, + { + "epoch": 1.6932972241029112, + "grad_norm": 1.236386694185598, + "learning_rate": 6.3062487465916825e-06, + "loss": 2.4416, + "step": 1251 + }, + { + "epoch": 1.6946513202437372, + "grad_norm": 0.9827244912709826, + "learning_rate": 6.301041103775731e-06, + "loss": 2.3147, + "step": 1252 + }, + { + "epoch": 1.6960054163845633, + "grad_norm": 1.2144425166077264, + "learning_rate": 6.295831946673527e-06, + "loss": 2.4805, + "step": 1253 + }, + { + "epoch": 1.6973595125253893, + "grad_norm": 0.9983619215236832, + "learning_rate": 6.290621281348028e-06, + "loss": 2.3773, + "step": 1254 + }, + { + "epoch": 1.6987136086662153, + "grad_norm": 1.465782148598409, + "learning_rate": 6.285409113863944e-06, + "loss": 2.6457, + "step": 1255 + }, + { + "epoch": 1.7000677048070414, + "grad_norm": 0.9944582045815571, + "learning_rate": 6.280195450287736e-06, + "loss": 2.3837, + "step": 1256 + }, + { + "epoch": 1.7014218009478674, + "grad_norm": 1.1585939861872792, + "learning_rate": 6.274980296687605e-06, + "loss": 2.2119, + "step": 1257 + }, + { + "epoch": 1.7027758970886933, + "grad_norm": 1.1437189700305217, + "learning_rate": 6.269763659133486e-06, + "loss": 1.8079, + "step": 1258 + }, + { + "epoch": 1.7041299932295193, + "grad_norm": 1.2582747800473197, + "learning_rate": 6.264545543697039e-06, + "loss": 2.5012, + "step": 1259 + }, + { + "epoch": 1.7054840893703453, + "grad_norm": 1.2978378068690752, + "learning_rate": 6.259325956451651e-06, + "loss": 2.9485, + "step": 1260 + }, + { + "epoch": 1.7068381855111712, + "grad_norm": 1.0424842976922624, + "learning_rate": 6.254104903472417e-06, + "loss": 2.2624, + "step": 1261 + }, + { + "epoch": 1.7081922816519972, + "grad_norm": 1.0868588376583306, + "learning_rate": 6.248882390836135e-06, + "loss": 2.3471, + "step": 1262 + }, + { + "epoch": 1.7095463777928233, + "grad_norm": 1.0151211458884728, + "learning_rate": 6.243658424621308e-06, + "loss": 1.9188, + "step": 1263 + }, + { + "epoch": 
1.7109004739336493, + "grad_norm": 1.1267719574629707, + "learning_rate": 6.238433010908131e-06, + "loss": 2.0653, + "step": 1264 + }, + { + "epoch": 1.7122545700744753, + "grad_norm": 1.1710384477346152, + "learning_rate": 6.233206155778476e-06, + "loss": 2.8079, + "step": 1265 + }, + { + "epoch": 1.7136086662153014, + "grad_norm": 1.2399841383381511, + "learning_rate": 6.227977865315902e-06, + "loss": 2.3833, + "step": 1266 + }, + { + "epoch": 1.7149627623561274, + "grad_norm": 1.0116588623292786, + "learning_rate": 6.222748145605631e-06, + "loss": 2.4164, + "step": 1267 + }, + { + "epoch": 1.7163168584969533, + "grad_norm": 0.8527451840083117, + "learning_rate": 6.217517002734554e-06, + "loss": 2.1877, + "step": 1268 + }, + { + "epoch": 1.7176709546377793, + "grad_norm": 1.079554004708601, + "learning_rate": 6.212284442791217e-06, + "loss": 1.7948, + "step": 1269 + }, + { + "epoch": 1.7190250507786051, + "grad_norm": 0.9137877079458239, + "learning_rate": 6.207050471865814e-06, + "loss": 1.841, + "step": 1270 + }, + { + "epoch": 1.7203791469194312, + "grad_norm": 1.1473597440593684, + "learning_rate": 6.201815096050181e-06, + "loss": 1.9544, + "step": 1271 + }, + { + "epoch": 1.7217332430602572, + "grad_norm": 1.189027252854009, + "learning_rate": 6.1965783214377895e-06, + "loss": 2.2543, + "step": 1272 + }, + { + "epoch": 1.7230873392010833, + "grad_norm": 0.8466476995754849, + "learning_rate": 6.1913401541237414e-06, + "loss": 1.8128, + "step": 1273 + }, + { + "epoch": 1.7244414353419093, + "grad_norm": 1.320497220055551, + "learning_rate": 6.186100600204757e-06, + "loss": 1.9606, + "step": 1274 + }, + { + "epoch": 1.7257955314827353, + "grad_norm": 1.1503966715251264, + "learning_rate": 6.180859665779173e-06, + "loss": 2.0067, + "step": 1275 + }, + { + "epoch": 1.7271496276235614, + "grad_norm": 1.197795743633595, + "learning_rate": 6.175617356946928e-06, + "loss": 2.4943, + "step": 1276 + }, + { + "epoch": 1.7285037237643874, + "grad_norm": 1.153983741344922, + "learning_rate": 6.170373679809565e-06, + "loss": 2.6468, + "step": 1277 + }, + { + "epoch": 1.7298578199052133, + "grad_norm": 1.1521006556182283, + "learning_rate": 6.165128640470217e-06, + "loss": 2.3347, + "step": 1278 + }, + { + "epoch": 1.7312119160460393, + "grad_norm": 0.9816413381160185, + "learning_rate": 6.159882245033606e-06, + "loss": 2.4498, + "step": 1279 + }, + { + "epoch": 1.7325660121868651, + "grad_norm": 1.07736534022899, + "learning_rate": 6.1546344996060294e-06, + "loss": 2.1578, + "step": 1280 + }, + { + "epoch": 1.7339201083276912, + "grad_norm": 1.1324233134707422, + "learning_rate": 6.149385410295353e-06, + "loss": 2.1432, + "step": 1281 + }, + { + "epoch": 1.7352742044685172, + "grad_norm": 0.9905290656285861, + "learning_rate": 6.144134983211012e-06, + "loss": 1.872, + "step": 1282 + }, + { + "epoch": 1.7366283006093433, + "grad_norm": 1.121201508813207, + "learning_rate": 6.138883224463999e-06, + "loss": 1.9648, + "step": 1283 + }, + { + "epoch": 1.7379823967501693, + "grad_norm": 1.1862628290117698, + "learning_rate": 6.13363014016685e-06, + "loss": 2.1744, + "step": 1284 + }, + { + "epoch": 1.7393364928909953, + "grad_norm": 0.9830015960619232, + "learning_rate": 6.128375736433652e-06, + "loss": 2.5532, + "step": 1285 + }, + { + "epoch": 1.7406905890318214, + "grad_norm": 1.3321125394117272, + "learning_rate": 6.123120019380021e-06, + "loss": 1.8057, + "step": 1286 + }, + { + "epoch": 1.7420446851726472, + "grad_norm": 0.9980976929176649, + "learning_rate": 6.117862995123105e-06, + "loss": 
2.337, + "step": 1287 + }, + { + "epoch": 1.7433987813134733, + "grad_norm": 1.3173652394689053, + "learning_rate": 6.112604669781572e-06, + "loss": 2.0408, + "step": 1288 + }, + { + "epoch": 1.7447528774542993, + "grad_norm": 1.126084316928292, + "learning_rate": 6.107345049475609e-06, + "loss": 2.079, + "step": 1289 + }, + { + "epoch": 1.7461069735951251, + "grad_norm": 1.0413819596877105, + "learning_rate": 6.102084140326901e-06, + "loss": 2.0356, + "step": 1290 + }, + { + "epoch": 1.7474610697359512, + "grad_norm": 1.189218565313232, + "learning_rate": 6.0968219484586415e-06, + "loss": 2.1497, + "step": 1291 + }, + { + "epoch": 1.7488151658767772, + "grad_norm": 1.2345681545643294, + "learning_rate": 6.091558479995512e-06, + "loss": 2.0314, + "step": 1292 + }, + { + "epoch": 1.7501692620176033, + "grad_norm": 1.2541579573695316, + "learning_rate": 6.086293741063685e-06, + "loss": 2.2602, + "step": 1293 + }, + { + "epoch": 1.7515233581584293, + "grad_norm": 1.4163932516550155, + "learning_rate": 6.081027737790804e-06, + "loss": 2.649, + "step": 1294 + }, + { + "epoch": 1.7528774542992553, + "grad_norm": 1.0677445213609345, + "learning_rate": 6.075760476305992e-06, + "loss": 1.6769, + "step": 1295 + }, + { + "epoch": 1.7542315504400814, + "grad_norm": 1.1305409453258632, + "learning_rate": 6.070491962739831e-06, + "loss": 2.1413, + "step": 1296 + }, + { + "epoch": 1.7555856465809072, + "grad_norm": 0.8589085134430466, + "learning_rate": 6.065222203224363e-06, + "loss": 2.2007, + "step": 1297 + }, + { + "epoch": 1.7569397427217333, + "grad_norm": 1.198177616657121, + "learning_rate": 6.05995120389308e-06, + "loss": 2.463, + "step": 1298 + }, + { + "epoch": 1.758293838862559, + "grad_norm": 1.0517431847975949, + "learning_rate": 6.054678970880915e-06, + "loss": 1.8492, + "step": 1299 + }, + { + "epoch": 1.7596479350033851, + "grad_norm": 1.0877464246501793, + "learning_rate": 6.049405510324237e-06, + "loss": 2.2445, + "step": 1300 + }, + { + "epoch": 1.7610020311442112, + "grad_norm": 0.9182819543184888, + "learning_rate": 6.04413082836085e-06, + "loss": 1.923, + "step": 1301 + }, + { + "epoch": 1.7623561272850372, + "grad_norm": 0.9479159720049781, + "learning_rate": 6.0388549311299696e-06, + "loss": 1.8528, + "step": 1302 + }, + { + "epoch": 1.7637102234258633, + "grad_norm": 1.1330012872888824, + "learning_rate": 6.033577824772234e-06, + "loss": 2.3405, + "step": 1303 + }, + { + "epoch": 1.7650643195666893, + "grad_norm": 1.156479517032631, + "learning_rate": 6.028299515429683e-06, + "loss": 2.4133, + "step": 1304 + }, + { + "epoch": 1.7664184157075153, + "grad_norm": 1.2849276017784446, + "learning_rate": 6.023020009245761e-06, + "loss": 2.0552, + "step": 1305 + }, + { + "epoch": 1.7677725118483414, + "grad_norm": 1.1081263537914658, + "learning_rate": 6.017739312365304e-06, + "loss": 1.9643, + "step": 1306 + }, + { + "epoch": 1.7691266079891672, + "grad_norm": 1.192468338343465, + "learning_rate": 6.012457430934532e-06, + "loss": 2.2097, + "step": 1307 + }, + { + "epoch": 1.7704807041299933, + "grad_norm": 1.3083267149971682, + "learning_rate": 6.007174371101045e-06, + "loss": 2.0671, + "step": 1308 + }, + { + "epoch": 1.771834800270819, + "grad_norm": 1.0164272321056016, + "learning_rate": 6.001890139013816e-06, + "loss": 2.1438, + "step": 1309 + }, + { + "epoch": 1.7731888964116451, + "grad_norm": 1.0169184208499216, + "learning_rate": 5.99660474082318e-06, + "loss": 1.6461, + "step": 1310 + }, + { + "epoch": 1.7745429925524712, + "grad_norm": 1.1208962249464, + "learning_rate": 
5.99131818268083e-06, + "loss": 2.0891, + "step": 1311 + }, + { + "epoch": 1.7758970886932972, + "grad_norm": 0.9880243308990688, + "learning_rate": 5.986030470739811e-06, + "loss": 2.2771, + "step": 1312 + }, + { + "epoch": 1.7772511848341233, + "grad_norm": 1.0164372276741276, + "learning_rate": 5.980741611154508e-06, + "loss": 1.9089, + "step": 1313 + }, + { + "epoch": 1.7786052809749493, + "grad_norm": 1.1780962664030357, + "learning_rate": 5.975451610080643e-06, + "loss": 2.2283, + "step": 1314 + }, + { + "epoch": 1.7799593771157753, + "grad_norm": 1.2337816535300354, + "learning_rate": 5.970160473675266e-06, + "loss": 2.0219, + "step": 1315 + }, + { + "epoch": 1.7813134732566012, + "grad_norm": 1.231311259477204, + "learning_rate": 5.96486820809675e-06, + "loss": 2.263, + "step": 1316 + }, + { + "epoch": 1.7826675693974272, + "grad_norm": 1.1820145455048345, + "learning_rate": 5.959574819504782e-06, + "loss": 2.1722, + "step": 1317 + }, + { + "epoch": 1.7840216655382533, + "grad_norm": 1.0602621258357094, + "learning_rate": 5.954280314060353e-06, + "loss": 2.0836, + "step": 1318 + }, + { + "epoch": 1.785375761679079, + "grad_norm": 1.0769310348498187, + "learning_rate": 5.9489846979257596e-06, + "loss": 2.1107, + "step": 1319 + }, + { + "epoch": 1.7867298578199051, + "grad_norm": 1.1685570897175144, + "learning_rate": 5.943687977264584e-06, + "loss": 2.3701, + "step": 1320 + }, + { + "epoch": 1.7880839539607312, + "grad_norm": 1.0639504620411393, + "learning_rate": 5.938390158241701e-06, + "loss": 1.8519, + "step": 1321 + }, + { + "epoch": 1.7894380501015572, + "grad_norm": 1.0471961016733244, + "learning_rate": 5.933091247023258e-06, + "loss": 2.3008, + "step": 1322 + }, + { + "epoch": 1.7907921462423833, + "grad_norm": 1.079334192472969, + "learning_rate": 5.927791249776678e-06, + "loss": 1.9762, + "step": 1323 + }, + { + "epoch": 1.7921462423832093, + "grad_norm": 1.0698384472116922, + "learning_rate": 5.922490172670645e-06, + "loss": 2.3354, + "step": 1324 + }, + { + "epoch": 1.7935003385240353, + "grad_norm": 0.9830724595088538, + "learning_rate": 5.917188021875101e-06, + "loss": 2.0081, + "step": 1325 + }, + { + "epoch": 1.7948544346648612, + "grad_norm": 1.340171291705083, + "learning_rate": 5.91188480356124e-06, + "loss": 2.1949, + "step": 1326 + }, + { + "epoch": 1.7962085308056872, + "grad_norm": 1.0913921699387112, + "learning_rate": 5.906580523901493e-06, + "loss": 2.0597, + "step": 1327 + }, + { + "epoch": 1.7975626269465133, + "grad_norm": 1.0873779097080578, + "learning_rate": 5.90127518906953e-06, + "loss": 2.8079, + "step": 1328 + }, + { + "epoch": 1.798916723087339, + "grad_norm": 1.143892818041929, + "learning_rate": 5.89596880524025e-06, + "loss": 2.1394, + "step": 1329 + }, + { + "epoch": 1.8002708192281651, + "grad_norm": 0.9347818074040029, + "learning_rate": 5.89066137858977e-06, + "loss": 2.0658, + "step": 1330 + }, + { + "epoch": 1.8016249153689912, + "grad_norm": 1.0104844185810784, + "learning_rate": 5.885352915295426e-06, + "loss": 2.4573, + "step": 1331 + }, + { + "epoch": 1.8029790115098172, + "grad_norm": 1.2621189889416795, + "learning_rate": 5.880043421535751e-06, + "loss": 2.2771, + "step": 1332 + }, + { + "epoch": 1.8043331076506433, + "grad_norm": 0.9776230416310355, + "learning_rate": 5.874732903490489e-06, + "loss": 2.2122, + "step": 1333 + }, + { + "epoch": 1.8056872037914693, + "grad_norm": 1.1935436794161547, + "learning_rate": 5.869421367340565e-06, + "loss": 2.6218, + "step": 1334 + }, + { + "epoch": 1.8070412999322953, + "grad_norm": 
1.166409774943222, + "learning_rate": 5.864108819268098e-06, + "loss": 2.1516, + "step": 1335 + }, + { + "epoch": 1.8083953960731212, + "grad_norm": 0.9563193799285069, + "learning_rate": 5.858795265456382e-06, + "loss": 2.1492, + "step": 1336 + }, + { + "epoch": 1.8097494922139472, + "grad_norm": 1.0173612877579, + "learning_rate": 5.853480712089875e-06, + "loss": 2.5045, + "step": 1337 + }, + { + "epoch": 1.811103588354773, + "grad_norm": 1.075936558534605, + "learning_rate": 5.8481651653542105e-06, + "loss": 1.8214, + "step": 1338 + }, + { + "epoch": 1.812457684495599, + "grad_norm": 1.2662166496893965, + "learning_rate": 5.842848631436169e-06, + "loss": 2.3341, + "step": 1339 + }, + { + "epoch": 1.8138117806364251, + "grad_norm": 1.114110647827857, + "learning_rate": 5.837531116523683e-06, + "loss": 1.9848, + "step": 1340 + }, + { + "epoch": 1.8151658767772512, + "grad_norm": 1.133467071972051, + "learning_rate": 5.832212626805825e-06, + "loss": 1.7728, + "step": 1341 + }, + { + "epoch": 1.8165199729180772, + "grad_norm": 1.1904363405626495, + "learning_rate": 5.826893168472807e-06, + "loss": 2.2031, + "step": 1342 + }, + { + "epoch": 1.8178740690589033, + "grad_norm": 1.16982781000476, + "learning_rate": 5.821572747715961e-06, + "loss": 2.2635, + "step": 1343 + }, + { + "epoch": 1.8192281651997293, + "grad_norm": 2.3913374538342995, + "learning_rate": 5.816251370727748e-06, + "loss": 2.7674, + "step": 1344 + }, + { + "epoch": 1.8205822613405553, + "grad_norm": 1.155071954814068, + "learning_rate": 5.810929043701733e-06, + "loss": 1.6222, + "step": 1345 + }, + { + "epoch": 1.8219363574813812, + "grad_norm": 1.1195068129462349, + "learning_rate": 5.805605772832592e-06, + "loss": 2.2405, + "step": 1346 + }, + { + "epoch": 1.8232904536222072, + "grad_norm": 1.0607883154488784, + "learning_rate": 5.800281564316098e-06, + "loss": 2.3167, + "step": 1347 + }, + { + "epoch": 1.824644549763033, + "grad_norm": 0.9198180067594192, + "learning_rate": 5.794956424349117e-06, + "loss": 2.2837, + "step": 1348 + }, + { + "epoch": 1.825998645903859, + "grad_norm": 1.1141503225834095, + "learning_rate": 5.789630359129599e-06, + "loss": 1.9332, + "step": 1349 + }, + { + "epoch": 1.8273527420446851, + "grad_norm": 1.1494560282733117, + "learning_rate": 5.784303374856566e-06, + "loss": 2.0177, + "step": 1350 + }, + { + "epoch": 1.8287068381855112, + "grad_norm": 0.9838628228119614, + "learning_rate": 5.778975477730117e-06, + "loss": 1.9256, + "step": 1351 + }, + { + "epoch": 1.8300609343263372, + "grad_norm": 1.0379092917122728, + "learning_rate": 5.773646673951406e-06, + "loss": 2.0192, + "step": 1352 + }, + { + "epoch": 1.8314150304671633, + "grad_norm": 1.0735535500692883, + "learning_rate": 5.768316969722651e-06, + "loss": 1.7708, + "step": 1353 + }, + { + "epoch": 1.8327691266079893, + "grad_norm": 1.1525689914627582, + "learning_rate": 5.762986371247111e-06, + "loss": 2.0803, + "step": 1354 + }, + { + "epoch": 1.8341232227488151, + "grad_norm": 1.2969233616374964, + "learning_rate": 5.757654884729087e-06, + "loss": 1.7907, + "step": 1355 + }, + { + "epoch": 1.8354773188896412, + "grad_norm": 1.2729837968386013, + "learning_rate": 5.752322516373916e-06, + "loss": 1.9638, + "step": 1356 + }, + { + "epoch": 1.8368314150304672, + "grad_norm": 1.123620270200479, + "learning_rate": 5.746989272387959e-06, + "loss": 2.4644, + "step": 1357 + }, + { + "epoch": 1.838185511171293, + "grad_norm": 1.1478076870568643, + "learning_rate": 5.741655158978598e-06, + "loss": 2.3496, + "step": 1358 + }, + { + "epoch": 
1.839539607312119, + "grad_norm": 0.9606164841242715, + "learning_rate": 5.736320182354227e-06, + "loss": 2.3232, + "step": 1359 + }, + { + "epoch": 1.8408937034529451, + "grad_norm": 0.9434545645826506, + "learning_rate": 5.730984348724242e-06, + "loss": 2.1405, + "step": 1360 + }, + { + "epoch": 1.8422477995937712, + "grad_norm": 1.1057923006106751, + "learning_rate": 5.725647664299039e-06, + "loss": 2.9284, + "step": 1361 + }, + { + "epoch": 1.8436018957345972, + "grad_norm": 1.0188797974653283, + "learning_rate": 5.720310135290002e-06, + "loss": 2.1123, + "step": 1362 + }, + { + "epoch": 1.8449559918754233, + "grad_norm": 1.4800000718890725, + "learning_rate": 5.7149717679095026e-06, + "loss": 2.5262, + "step": 1363 + }, + { + "epoch": 1.8463100880162493, + "grad_norm": 1.4201072710479334, + "learning_rate": 5.709632568370884e-06, + "loss": 1.9675, + "step": 1364 + }, + { + "epoch": 1.8476641841570751, + "grad_norm": 1.0271104317135864, + "learning_rate": 5.704292542888458e-06, + "loss": 2.3992, + "step": 1365 + }, + { + "epoch": 1.8490182802979012, + "grad_norm": 1.1894861709189837, + "learning_rate": 5.698951697677498e-06, + "loss": 2.3255, + "step": 1366 + }, + { + "epoch": 1.850372376438727, + "grad_norm": 0.9818606413266471, + "learning_rate": 5.6936100389542345e-06, + "loss": 2.068, + "step": 1367 + }, + { + "epoch": 1.851726472579553, + "grad_norm": 1.1613550136148025, + "learning_rate": 5.688267572935843e-06, + "loss": 2.0782, + "step": 1368 + }, + { + "epoch": 1.853080568720379, + "grad_norm": 1.2604918956006983, + "learning_rate": 5.682924305840435e-06, + "loss": 2.3411, + "step": 1369 + }, + { + "epoch": 1.8544346648612051, + "grad_norm": 1.0429238519002082, + "learning_rate": 5.6775802438870596e-06, + "loss": 2.3507, + "step": 1370 + }, + { + "epoch": 1.8557887610020312, + "grad_norm": 1.1476866829926053, + "learning_rate": 5.6722353932956895e-06, + "loss": 2.1169, + "step": 1371 + }, + { + "epoch": 1.8571428571428572, + "grad_norm": 1.2071680463300853, + "learning_rate": 5.6668897602872145e-06, + "loss": 2.3788, + "step": 1372 + }, + { + "epoch": 1.8584969532836833, + "grad_norm": 1.330454708741957, + "learning_rate": 5.661543351083435e-06, + "loss": 2.3059, + "step": 1373 + }, + { + "epoch": 1.8598510494245093, + "grad_norm": 1.0666387576576266, + "learning_rate": 5.656196171907055e-06, + "loss": 1.988, + "step": 1374 + }, + { + "epoch": 1.8612051455653351, + "grad_norm": 1.1222282028435293, + "learning_rate": 5.650848228981676e-06, + "loss": 2.3535, + "step": 1375 + }, + { + "epoch": 1.8625592417061612, + "grad_norm": 1.0691636063361554, + "learning_rate": 5.645499528531785e-06, + "loss": 2.3795, + "step": 1376 + }, + { + "epoch": 1.863913337846987, + "grad_norm": 1.0416420607905428, + "learning_rate": 5.640150076782755e-06, + "loss": 2.2143, + "step": 1377 + }, + { + "epoch": 1.865267433987813, + "grad_norm": 1.1126322928122026, + "learning_rate": 5.634799879960833e-06, + "loss": 1.8687, + "step": 1378 + }, + { + "epoch": 1.866621530128639, + "grad_norm": 1.0283378507513847, + "learning_rate": 5.629448944293128e-06, + "loss": 2.3348, + "step": 1379 + }, + { + "epoch": 1.8679756262694651, + "grad_norm": 1.1086979474304333, + "learning_rate": 5.624097276007614e-06, + "loss": 2.4503, + "step": 1380 + }, + { + "epoch": 1.8693297224102912, + "grad_norm": 1.0114717774494804, + "learning_rate": 5.618744881333117e-06, + "loss": 2.2857, + "step": 1381 + }, + { + "epoch": 1.8706838185511172, + "grad_norm": 1.271521217949957, + "learning_rate": 5.613391766499308e-06, + "loss": 
2.1678, + "step": 1382 + }, + { + "epoch": 1.8720379146919433, + "grad_norm": 1.0690493430782453, + "learning_rate": 5.6080379377366936e-06, + "loss": 2.0153, + "step": 1383 + }, + { + "epoch": 1.873392010832769, + "grad_norm": 1.139241558729131, + "learning_rate": 5.6026834012766155e-06, + "loss": 2.6149, + "step": 1384 + }, + { + "epoch": 1.8747461069735951, + "grad_norm": 1.0202843691038828, + "learning_rate": 5.597328163351237e-06, + "loss": 2.3195, + "step": 1385 + }, + { + "epoch": 1.8761002031144212, + "grad_norm": 1.0587205093081162, + "learning_rate": 5.59197223019354e-06, + "loss": 2.369, + "step": 1386 + }, + { + "epoch": 1.877454299255247, + "grad_norm": 1.0246205891822728, + "learning_rate": 5.586615608037309e-06, + "loss": 1.7189, + "step": 1387 + }, + { + "epoch": 1.878808395396073, + "grad_norm": 1.2556625533663983, + "learning_rate": 5.581258303117138e-06, + "loss": 2.6877, + "step": 1388 + }, + { + "epoch": 1.880162491536899, + "grad_norm": 1.0753392702185522, + "learning_rate": 5.575900321668414e-06, + "loss": 2.2089, + "step": 1389 + }, + { + "epoch": 1.8815165876777251, + "grad_norm": 1.0364442011380954, + "learning_rate": 5.5705416699273085e-06, + "loss": 2.2061, + "step": 1390 + }, + { + "epoch": 1.8828706838185512, + "grad_norm": 0.9721557814407294, + "learning_rate": 5.565182354130776e-06, + "loss": 2.0579, + "step": 1391 + }, + { + "epoch": 1.8842247799593772, + "grad_norm": 1.0946729786176126, + "learning_rate": 5.559822380516539e-06, + "loss": 1.8052, + "step": 1392 + }, + { + "epoch": 1.8855788761002032, + "grad_norm": 1.2816555939803111, + "learning_rate": 5.554461755323095e-06, + "loss": 2.5817, + "step": 1393 + }, + { + "epoch": 1.886932972241029, + "grad_norm": 1.132710357655246, + "learning_rate": 5.54910048478969e-06, + "loss": 2.0912, + "step": 1394 + }, + { + "epoch": 1.8882870683818551, + "grad_norm": 1.4036561544628778, + "learning_rate": 5.5437385751563265e-06, + "loss": 1.9642, + "step": 1395 + }, + { + "epoch": 1.8896411645226812, + "grad_norm": 0.9338991067427596, + "learning_rate": 5.5383760326637525e-06, + "loss": 2.107, + "step": 1396 + }, + { + "epoch": 1.890995260663507, + "grad_norm": 1.0328732328422332, + "learning_rate": 5.533012863553445e-06, + "loss": 2.0279, + "step": 1397 + }, + { + "epoch": 1.892349356804333, + "grad_norm": 1.0913343945197256, + "learning_rate": 5.527649074067618e-06, + "loss": 2.037, + "step": 1398 + }, + { + "epoch": 1.893703452945159, + "grad_norm": 1.1464521194384492, + "learning_rate": 5.522284670449204e-06, + "loss": 1.9054, + "step": 1399 + }, + { + "epoch": 1.8950575490859851, + "grad_norm": 1.7305402471538387, + "learning_rate": 5.51691965894185e-06, + "loss": 2.1952, + "step": 1400 + }, + { + "epoch": 1.8964116452268112, + "grad_norm": 0.9382393928786104, + "learning_rate": 5.511554045789915e-06, + "loss": 1.937, + "step": 1401 + }, + { + "epoch": 1.8977657413676372, + "grad_norm": 1.042649655218192, + "learning_rate": 5.506187837238451e-06, + "loss": 1.9652, + "step": 1402 + }, + { + "epoch": 1.8991198375084632, + "grad_norm": 1.1266190255444637, + "learning_rate": 5.5008210395332095e-06, + "loss": 2.5897, + "step": 1403 + }, + { + "epoch": 1.900473933649289, + "grad_norm": 1.0568485706741606, + "learning_rate": 5.495453658920622e-06, + "loss": 2.1456, + "step": 1404 + }, + { + "epoch": 1.9018280297901151, + "grad_norm": 2.3042054688617517, + "learning_rate": 5.490085701647805e-06, + "loss": 2.4831, + "step": 1405 + }, + { + "epoch": 1.903182125930941, + "grad_norm": 1.0661084879580494, + "learning_rate": 
5.484717173962538e-06, + "loss": 1.979, + "step": 1406 + }, + { + "epoch": 1.904536222071767, + "grad_norm": 1.8624940514969899, + "learning_rate": 5.479348082113273e-06, + "loss": 2.2721, + "step": 1407 + }, + { + "epoch": 1.905890318212593, + "grad_norm": 1.0349458998467256, + "learning_rate": 5.4739784323491115e-06, + "loss": 2.1855, + "step": 1408 + }, + { + "epoch": 1.907244414353419, + "grad_norm": 1.0420374247356121, + "learning_rate": 5.468608230919811e-06, + "loss": 1.9857, + "step": 1409 + }, + { + "epoch": 1.9085985104942451, + "grad_norm": 1.0571913625970193, + "learning_rate": 5.463237484075765e-06, + "loss": 1.9137, + "step": 1410 + }, + { + "epoch": 1.9099526066350712, + "grad_norm": 1.148192788565203, + "learning_rate": 5.457866198068006e-06, + "loss": 1.9036, + "step": 1411 + }, + { + "epoch": 1.9113067027758972, + "grad_norm": 0.9825967829135833, + "learning_rate": 5.45249437914819e-06, + "loss": 2.4171, + "step": 1412 + }, + { + "epoch": 1.9126607989167232, + "grad_norm": 1.070670848522326, + "learning_rate": 5.4471220335685985e-06, + "loss": 2.2573, + "step": 1413 + }, + { + "epoch": 1.914014895057549, + "grad_norm": 1.1613970345507367, + "learning_rate": 5.44174916758212e-06, + "loss": 2.1134, + "step": 1414 + }, + { + "epoch": 1.9153689911983751, + "grad_norm": 0.9179865288261003, + "learning_rate": 5.436375787442256e-06, + "loss": 2.2131, + "step": 1415 + }, + { + "epoch": 1.916723087339201, + "grad_norm": 1.276005671850693, + "learning_rate": 5.4310018994030974e-06, + "loss": 2.0346, + "step": 1416 + }, + { + "epoch": 1.918077183480027, + "grad_norm": 1.1781979348827227, + "learning_rate": 5.425627509719336e-06, + "loss": 2.0975, + "step": 1417 + }, + { + "epoch": 1.919431279620853, + "grad_norm": 1.1588736839015075, + "learning_rate": 5.420252624646238e-06, + "loss": 2.8176, + "step": 1418 + }, + { + "epoch": 1.920785375761679, + "grad_norm": 1.0396625706559195, + "learning_rate": 5.414877250439654e-06, + "loss": 2.2559, + "step": 1419 + }, + { + "epoch": 1.9221394719025051, + "grad_norm": 1.1691760754641374, + "learning_rate": 5.409501393356001e-06, + "loss": 2.134, + "step": 1420 + }, + { + "epoch": 1.9234935680433312, + "grad_norm": 1.0188156446108358, + "learning_rate": 5.404125059652255e-06, + "loss": 3.1594, + "step": 1421 + }, + { + "epoch": 1.9248476641841572, + "grad_norm": 1.1384064676805719, + "learning_rate": 5.398748255585952e-06, + "loss": 2.3359, + "step": 1422 + }, + { + "epoch": 1.926201760324983, + "grad_norm": 1.0703628157280833, + "learning_rate": 5.39337098741517e-06, + "loss": 2.322, + "step": 1423 + }, + { + "epoch": 1.927555856465809, + "grad_norm": 3.5972509087878834, + "learning_rate": 5.387993261398532e-06, + "loss": 2.0459, + "step": 1424 + }, + { + "epoch": 1.9289099526066351, + "grad_norm": 1.1278074196782333, + "learning_rate": 5.3826150837951925e-06, + "loss": 2.1835, + "step": 1425 + }, + { + "epoch": 1.930264048747461, + "grad_norm": 1.1684466435119685, + "learning_rate": 5.3772364608648304e-06, + "loss": 2.5017, + "step": 1426 + }, + { + "epoch": 1.931618144888287, + "grad_norm": 1.1988587658843286, + "learning_rate": 5.371857398867644e-06, + "loss": 2.4385, + "step": 1427 + }, + { + "epoch": 1.932972241029113, + "grad_norm": 0.943349610365291, + "learning_rate": 5.36647790406434e-06, + "loss": 2.1582, + "step": 1428 + }, + { + "epoch": 1.934326337169939, + "grad_norm": 1.0186861984009383, + "learning_rate": 5.361097982716133e-06, + "loss": 2.0697, + "step": 1429 + }, + { + "epoch": 1.9356804333107651, + "grad_norm": 
1.0863559782982186, + "learning_rate": 5.355717641084733e-06, + "loss": 2.6047, + "step": 1430 + }, + { + "epoch": 1.9370345294515912, + "grad_norm": 1.2753139571991683, + "learning_rate": 5.350336885432337e-06, + "loss": 2.5542, + "step": 1431 + }, + { + "epoch": 1.9383886255924172, + "grad_norm": 1.2432570870357769, + "learning_rate": 5.3449557220216245e-06, + "loss": 2.465, + "step": 1432 + }, + { + "epoch": 1.939742721733243, + "grad_norm": 1.1214426550882808, + "learning_rate": 5.339574157115752e-06, + "loss": 1.8317, + "step": 1433 + }, + { + "epoch": 1.941096817874069, + "grad_norm": 1.1278145331998868, + "learning_rate": 5.334192196978341e-06, + "loss": 1.8582, + "step": 1434 + }, + { + "epoch": 1.942450914014895, + "grad_norm": 0.9703824311730281, + "learning_rate": 5.328809847873472e-06, + "loss": 2.0282, + "step": 1435 + }, + { + "epoch": 1.943805010155721, + "grad_norm": 1.1313991479024939, + "learning_rate": 5.32342711606568e-06, + "loss": 2.2019, + "step": 1436 + }, + { + "epoch": 1.945159106296547, + "grad_norm": 1.1285464646294614, + "learning_rate": 5.318044007819948e-06, + "loss": 2.2181, + "step": 1437 + }, + { + "epoch": 1.946513202437373, + "grad_norm": 1.2286900340042946, + "learning_rate": 5.312660529401693e-06, + "loss": 1.8868, + "step": 1438 + }, + { + "epoch": 1.947867298578199, + "grad_norm": 1.0620287314695505, + "learning_rate": 5.307276687076762e-06, + "loss": 2.5188, + "step": 1439 + }, + { + "epoch": 1.9492213947190251, + "grad_norm": 1.0392077445928851, + "learning_rate": 5.301892487111431e-06, + "loss": 2.0289, + "step": 1440 + }, + { + "epoch": 1.9505754908598512, + "grad_norm": 1.3207900398189212, + "learning_rate": 5.296507935772386e-06, + "loss": 2.2526, + "step": 1441 + }, + { + "epoch": 1.9519295870006772, + "grad_norm": 1.407831485125338, + "learning_rate": 5.291123039326728e-06, + "loss": 2.3924, + "step": 1442 + }, + { + "epoch": 1.953283683141503, + "grad_norm": 1.092640851593957, + "learning_rate": 5.285737804041955e-06, + "loss": 2.2856, + "step": 1443 + }, + { + "epoch": 1.954637779282329, + "grad_norm": 1.0720656311911092, + "learning_rate": 5.2803522361859596e-06, + "loss": 2.3745, + "step": 1444 + }, + { + "epoch": 1.955991875423155, + "grad_norm": 1.0470209379216529, + "learning_rate": 5.274966342027025e-06, + "loss": 2.4369, + "step": 1445 + }, + { + "epoch": 1.957345971563981, + "grad_norm": 1.7795808290406938, + "learning_rate": 5.269580127833812e-06, + "loss": 2.2505, + "step": 1446 + }, + { + "epoch": 1.958700067704807, + "grad_norm": 0.949115783937015, + "learning_rate": 5.264193599875353e-06, + "loss": 2.1088, + "step": 1447 + }, + { + "epoch": 1.960054163845633, + "grad_norm": 1.0307094217484138, + "learning_rate": 5.258806764421048e-06, + "loss": 2.0897, + "step": 1448 + }, + { + "epoch": 1.961408259986459, + "grad_norm": 1.0944949048376986, + "learning_rate": 5.253419627740651e-06, + "loss": 2.2959, + "step": 1449 + }, + { + "epoch": 1.9627623561272851, + "grad_norm": 1.0529310025241865, + "learning_rate": 5.248032196104271e-06, + "loss": 2.5453, + "step": 1450 + }, + { + "epoch": 1.9641164522681112, + "grad_norm": 1.135872662601797, + "learning_rate": 5.2426444757823595e-06, + "loss": 2.1203, + "step": 1451 + }, + { + "epoch": 1.965470548408937, + "grad_norm": 1.0238104015543514, + "learning_rate": 5.2372564730457e-06, + "loss": 2.2604, + "step": 1452 + }, + { + "epoch": 1.966824644549763, + "grad_norm": 0.9503239116070696, + "learning_rate": 5.231868194165409e-06, + "loss": 2.4823, + "step": 1453 + }, + { + "epoch": 
1.968178740690589, + "grad_norm": 1.1160937498521495, + "learning_rate": 5.226479645412923e-06, + "loss": 2.2582, + "step": 1454 + }, + { + "epoch": 1.969532836831415, + "grad_norm": 1.0681616120180237, + "learning_rate": 5.221090833059992e-06, + "loss": 2.1064, + "step": 1455 + }, + { + "epoch": 1.970886932972241, + "grad_norm": 1.2336888814525633, + "learning_rate": 5.215701763378673e-06, + "loss": 2.2147, + "step": 1456 + }, + { + "epoch": 1.972241029113067, + "grad_norm": 2.70975758573399, + "learning_rate": 5.210312442641327e-06, + "loss": 2.0919, + "step": 1457 + }, + { + "epoch": 1.973595125253893, + "grad_norm": 1.062018871284572, + "learning_rate": 5.204922877120597e-06, + "loss": 2.1447, + "step": 1458 + }, + { + "epoch": 1.974949221394719, + "grad_norm": 1.0661389981794334, + "learning_rate": 5.19953307308942e-06, + "loss": 1.9739, + "step": 1459 + }, + { + "epoch": 1.9763033175355451, + "grad_norm": 1.0931038252658167, + "learning_rate": 5.194143036821008e-06, + "loss": 1.9725, + "step": 1460 + }, + { + "epoch": 1.9776574136763712, + "grad_norm": 1.0840157871848553, + "learning_rate": 5.188752774588841e-06, + "loss": 2.2702, + "step": 1461 + }, + { + "epoch": 1.979011509817197, + "grad_norm": 1.0991100379851682, + "learning_rate": 5.183362292666667e-06, + "loss": 2.2273, + "step": 1462 + }, + { + "epoch": 1.980365605958023, + "grad_norm": 1.0930537215096972, + "learning_rate": 5.177971597328484e-06, + "loss": 2.2848, + "step": 1463 + }, + { + "epoch": 1.981719702098849, + "grad_norm": 1.1727805767142325, + "learning_rate": 5.172580694848541e-06, + "loss": 2.2295, + "step": 1464 + }, + { + "epoch": 1.983073798239675, + "grad_norm": 1.0331418445485365, + "learning_rate": 5.16718959150133e-06, + "loss": 2.1311, + "step": 1465 + }, + { + "epoch": 1.984427894380501, + "grad_norm": 1.1782815464908813, + "learning_rate": 5.161798293561573e-06, + "loss": 2.2843, + "step": 1466 + }, + { + "epoch": 1.985781990521327, + "grad_norm": 1.1287633720503536, + "learning_rate": 5.156406807304223e-06, + "loss": 2.2715, + "step": 1467 + }, + { + "epoch": 1.987136086662153, + "grad_norm": 1.190713586693506, + "learning_rate": 5.151015139004445e-06, + "loss": 1.8253, + "step": 1468 + }, + { + "epoch": 1.988490182802979, + "grad_norm": 1.070858654181211, + "learning_rate": 5.145623294937624e-06, + "loss": 2.1808, + "step": 1469 + }, + { + "epoch": 1.9898442789438051, + "grad_norm": 1.1615785503909837, + "learning_rate": 5.140231281379345e-06, + "loss": 1.8325, + "step": 1470 + }, + { + "epoch": 1.9911983750846312, + "grad_norm": 0.983503119817654, + "learning_rate": 5.134839104605394e-06, + "loss": 2.0685, + "step": 1471 + }, + { + "epoch": 1.992552471225457, + "grad_norm": 1.1464176386439313, + "learning_rate": 5.129446770891738e-06, + "loss": 2.325, + "step": 1472 + }, + { + "epoch": 1.993906567366283, + "grad_norm": 1.62246860837646, + "learning_rate": 5.124054286514538e-06, + "loss": 2.6138, + "step": 1473 + }, + { + "epoch": 1.9952606635071088, + "grad_norm": 1.0277305356811535, + "learning_rate": 5.118661657750122e-06, + "loss": 2.1659, + "step": 1474 + }, + { + "epoch": 1.996614759647935, + "grad_norm": 1.054556455145067, + "learning_rate": 5.113268890874994e-06, + "loss": 2.5919, + "step": 1475 + }, + { + "epoch": 2.001354096140826, + "grad_norm": 1.1099987816340893, + "learning_rate": 5.107875992165809e-06, + "loss": 1.6184, + "step": 1476 + } + ], + "logging_steps": 1, + "max_steps": 2952, + "num_input_tokens_seen": 0, + "num_train_epochs": 4, + "save_steps": 738, + "stateful_callbacks": { 
+ "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 3.8997223198949376e+17, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +}