fic-rubert-tiny-2-chckpnt800 / trainer_state.json
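The JSON below is the raw Trainer state saved with this checkpoint. As a minimal sketch (assuming a local copy of this file saved as trainer_state.json; the path and variable names are illustrative, not part of the checkpoint), the per-step training losses and the end-of-epoch eval metrics can be pulled out of log_history like this:

import json

# Load the checkpoint's trainer state (file path is an assumption for illustration).
with open("trainer_state.json", encoding="utf-8") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss" and related metrics.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_loss" in e]

# Per-step training loss, e.g. for plotting a loss curve.
steps = [e["step"] for e in train_log]
losses = [e["loss"] for e in train_log]
print(f"{len(steps)} logged steps, last loss {losses[-1]:.4f} at step {steps[-1]}")

# Eval metrics (in this file, recorded at the end of epoch 1, step 357).
for e in eval_log:
    print(e["step"], e["eval_loss"], e.get("eval_roc_auc"))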
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.2408963585434174,
"eval_steps": 500,
"global_step": 800,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0028011204481792717,
"grad_norm": 0.296756774187088,
"learning_rate": 8.333333333333333e-07,
"loss": 0.1718,
"step": 1
},
{
"epoch": 0.0056022408963585435,
"grad_norm": 0.36025628447532654,
"learning_rate": 1.6666666666666667e-06,
"loss": 0.133,
"step": 2
},
{
"epoch": 0.008403361344537815,
"grad_norm": 0.3665211796760559,
"learning_rate": 2.5e-06,
"loss": 0.1346,
"step": 3
},
{
"epoch": 0.011204481792717087,
"grad_norm": 0.47092393040657043,
"learning_rate": 3.3333333333333333e-06,
"loss": 0.2136,
"step": 4
},
{
"epoch": 0.014005602240896359,
"grad_norm": 0.5220111608505249,
"learning_rate": 4.166666666666667e-06,
"loss": 0.2203,
"step": 5
},
{
"epoch": 0.01680672268907563,
"grad_norm": 0.6161433458328247,
"learning_rate": 5e-06,
"loss": 0.0893,
"step": 6
},
{
"epoch": 0.0196078431372549,
"grad_norm": 0.34641674160957336,
"learning_rate": 5.833333333333334e-06,
"loss": 0.1721,
"step": 7
},
{
"epoch": 0.022408963585434174,
"grad_norm": 0.6245524287223816,
"learning_rate": 6.666666666666667e-06,
"loss": 0.0883,
"step": 8
},
{
"epoch": 0.025210084033613446,
"grad_norm": 0.32467812299728394,
"learning_rate": 7.5e-06,
"loss": 0.167,
"step": 9
},
{
"epoch": 0.028011204481792718,
"grad_norm": 0.7720906138420105,
"learning_rate": 8.333333333333334e-06,
"loss": 0.2531,
"step": 10
},
{
"epoch": 0.03081232492997199,
"grad_norm": 0.38567203283309937,
"learning_rate": 9.166666666666666e-06,
"loss": 0.1309,
"step": 11
},
{
"epoch": 0.03361344537815126,
"grad_norm": 0.5268588662147522,
"learning_rate": 1e-05,
"loss": 0.2119,
"step": 12
},
{
"epoch": 0.036414565826330535,
"grad_norm": 0.3139660060405731,
"learning_rate": 1.0833333333333334e-05,
"loss": 0.1741,
"step": 13
},
{
"epoch": 0.0392156862745098,
"grad_norm": 0.35874423384666443,
"learning_rate": 1.1666666666666668e-05,
"loss": 0.1337,
"step": 14
},
{
"epoch": 0.04201680672268908,
"grad_norm": 0.3121141195297241,
"learning_rate": 1.25e-05,
"loss": 0.1745,
"step": 15
},
{
"epoch": 0.04481792717086835,
"grad_norm": 0.4937639534473419,
"learning_rate": 1.3333333333333333e-05,
"loss": 0.208,
"step": 16
},
{
"epoch": 0.047619047619047616,
"grad_norm": 1.4663589000701904,
"learning_rate": 1.4166666666666668e-05,
"loss": 0.3469,
"step": 17
},
{
"epoch": 0.05042016806722689,
"grad_norm": 0.3288300633430481,
"learning_rate": 1.5e-05,
"loss": 0.1735,
"step": 18
},
{
"epoch": 0.05322128851540616,
"grad_norm": 0.7657830119132996,
"learning_rate": 1.5833333333333333e-05,
"loss": 0.252,
"step": 19
},
{
"epoch": 0.056022408963585436,
"grad_norm": 0.36719515919685364,
"learning_rate": 1.6666666666666667e-05,
"loss": 0.1694,
"step": 20
},
{
"epoch": 0.058823529411764705,
"grad_norm": 0.2963164448738098,
"learning_rate": 1.75e-05,
"loss": 0.1647,
"step": 21
},
{
"epoch": 0.06162464985994398,
"grad_norm": 0.2958138883113861,
"learning_rate": 1.8333333333333333e-05,
"loss": 0.1738,
"step": 22
},
{
"epoch": 0.06442577030812324,
"grad_norm": 0.6379082202911377,
"learning_rate": 1.9166666666666667e-05,
"loss": 0.0961,
"step": 23
},
{
"epoch": 0.06722689075630252,
"grad_norm": 0.40202125906944275,
"learning_rate": 2e-05,
"loss": 0.1353,
"step": 24
},
{
"epoch": 0.0700280112044818,
"grad_norm": 0.631089448928833,
"learning_rate": 2.0833333333333336e-05,
"loss": 0.0944,
"step": 25
},
{
"epoch": 0.07282913165266107,
"grad_norm": 0.6378069519996643,
"learning_rate": 2.1666666666666667e-05,
"loss": 0.0931,
"step": 26
},
{
"epoch": 0.07563025210084033,
"grad_norm": 0.3574202060699463,
"learning_rate": 2.25e-05,
"loss": 0.1729,
"step": 27
},
{
"epoch": 0.0784313725490196,
"grad_norm": 0.3499799370765686,
"learning_rate": 2.3333333333333336e-05,
"loss": 0.1732,
"step": 28
},
{
"epoch": 0.08123249299719888,
"grad_norm": 0.5255885124206543,
"learning_rate": 2.4166666666666667e-05,
"loss": 0.2186,
"step": 29
},
{
"epoch": 0.08403361344537816,
"grad_norm": 0.35638874769210815,
"learning_rate": 2.5e-05,
"loss": 0.1338,
"step": 30
},
{
"epoch": 0.08683473389355742,
"grad_norm": 0.3849603235721588,
"learning_rate": 2.5833333333333336e-05,
"loss": 0.1326,
"step": 31
},
{
"epoch": 0.0896358543417367,
"grad_norm": 0.32195600867271423,
"learning_rate": 2.6666666666666667e-05,
"loss": 0.1715,
"step": 32
},
{
"epoch": 0.09243697478991597,
"grad_norm": 0.8319142460823059,
"learning_rate": 2.7500000000000004e-05,
"loss": 0.2664,
"step": 33
},
{
"epoch": 0.09523809523809523,
"grad_norm": 0.7899373769760132,
"learning_rate": 2.8333333333333335e-05,
"loss": 0.2529,
"step": 34
},
{
"epoch": 0.09803921568627451,
"grad_norm": 0.5739774107933044,
"learning_rate": 2.916666666666667e-05,
"loss": 0.0865,
"step": 35
},
{
"epoch": 0.10084033613445378,
"grad_norm": 0.3802482783794403,
"learning_rate": 3e-05,
"loss": 0.131,
"step": 36
},
{
"epoch": 0.10364145658263306,
"grad_norm": 0.4794873893260956,
"learning_rate": 3.0833333333333335e-05,
"loss": 0.2136,
"step": 37
},
{
"epoch": 0.10644257703081232,
"grad_norm": 0.3657197654247284,
"learning_rate": 3.1666666666666666e-05,
"loss": 0.1318,
"step": 38
},
{
"epoch": 0.1092436974789916,
"grad_norm": 0.3748391270637512,
"learning_rate": 3.2500000000000004e-05,
"loss": 0.1775,
"step": 39
},
{
"epoch": 0.11204481792717087,
"grad_norm": 0.4693293869495392,
"learning_rate": 3.3333333333333335e-05,
"loss": 0.2114,
"step": 40
},
{
"epoch": 0.11484593837535013,
"grad_norm": 0.3253132998943329,
"learning_rate": 3.4166666666666666e-05,
"loss": 0.1775,
"step": 41
},
{
"epoch": 0.11764705882352941,
"grad_norm": 0.5953426361083984,
"learning_rate": 3.5e-05,
"loss": 0.2213,
"step": 42
},
{
"epoch": 0.12044817927170869,
"grad_norm": 0.3064591586589813,
"learning_rate": 3.5833333333333335e-05,
"loss": 0.1328,
"step": 43
},
{
"epoch": 0.12324929971988796,
"grad_norm": 0.37691906094551086,
"learning_rate": 3.6666666666666666e-05,
"loss": 0.131,
"step": 44
},
{
"epoch": 0.12605042016806722,
"grad_norm": 1.2265459299087524,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.3374,
"step": 45
},
{
"epoch": 0.12885154061624648,
"grad_norm": 0.30182263255119324,
"learning_rate": 3.8333333333333334e-05,
"loss": 0.1743,
"step": 46
},
{
"epoch": 0.13165266106442577,
"grad_norm": 0.3886899948120117,
"learning_rate": 3.9166666666666665e-05,
"loss": 0.131,
"step": 47
},
{
"epoch": 0.13445378151260504,
"grad_norm": 0.5219215154647827,
"learning_rate": 4e-05,
"loss": 0.2145,
"step": 48
},
{
"epoch": 0.13725490196078433,
"grad_norm": 0.2958211898803711,
"learning_rate": 4.0833333333333334e-05,
"loss": 0.1715,
"step": 49
},
{
"epoch": 0.1400560224089636,
"grad_norm": 0.290129154920578,
"learning_rate": 4.166666666666667e-05,
"loss": 0.1739,
"step": 50
},
{
"epoch": 0.14285714285714285,
"grad_norm": 0.3508636951446533,
"learning_rate": 4.25e-05,
"loss": 0.132,
"step": 51
},
{
"epoch": 0.14565826330532214,
"grad_norm": 0.6013919711112976,
"learning_rate": 4.3333333333333334e-05,
"loss": 0.0944,
"step": 52
},
{
"epoch": 0.1484593837535014,
"grad_norm": 0.5936094522476196,
"learning_rate": 4.4166666666666665e-05,
"loss": 0.0949,
"step": 53
},
{
"epoch": 0.15126050420168066,
"grad_norm": 0.38572150468826294,
"learning_rate": 4.5e-05,
"loss": 0.135,
"step": 54
},
{
"epoch": 0.15406162464985995,
"grad_norm": 0.2600800693035126,
"learning_rate": 4.5833333333333334e-05,
"loss": 0.1692,
"step": 55
},
{
"epoch": 0.1568627450980392,
"grad_norm": 0.3701837360858917,
"learning_rate": 4.666666666666667e-05,
"loss": 0.1291,
"step": 56
},
{
"epoch": 0.15966386554621848,
"grad_norm": 0.34277698397636414,
"learning_rate": 4.75e-05,
"loss": 0.1346,
"step": 57
},
{
"epoch": 0.16246498599439776,
"grad_norm": 0.35784104466438293,
"learning_rate": 4.8333333333333334e-05,
"loss": 0.177,
"step": 58
},
{
"epoch": 0.16526610644257703,
"grad_norm": 0.8476487994194031,
"learning_rate": 4.9166666666666665e-05,
"loss": 0.2581,
"step": 59
},
{
"epoch": 0.16806722689075632,
"grad_norm": 0.8182944655418396,
"learning_rate": 5e-05,
"loss": 0.2639,
"step": 60
},
{
"epoch": 0.17086834733893558,
"grad_norm": 0.29115599393844604,
"learning_rate": 4.999995853979553e-05,
"loss": 0.1691,
"step": 61
},
{
"epoch": 0.17366946778711484,
"grad_norm": 0.7699853777885437,
"learning_rate": 4.9999834159319655e-05,
"loss": 0.264,
"step": 62
},
{
"epoch": 0.17647058823529413,
"grad_norm": 0.7362309694290161,
"learning_rate": 4.999962685898491e-05,
"loss": 0.2508,
"step": 63
},
{
"epoch": 0.1792717086834734,
"grad_norm": 0.4930090010166168,
"learning_rate": 4.999933663947886e-05,
"loss": 0.2159,
"step": 64
},
{
"epoch": 0.18207282913165265,
"grad_norm": 0.3581504821777344,
"learning_rate": 4.999896350176413e-05,
"loss": 0.1278,
"step": 65
},
{
"epoch": 0.18487394957983194,
"grad_norm": 0.5248085260391235,
"learning_rate": 4.999850744707835e-05,
"loss": 0.0941,
"step": 66
},
{
"epoch": 0.1876750700280112,
"grad_norm": 0.3348913788795471,
"learning_rate": 4.9997968476934156e-05,
"loss": 0.1326,
"step": 67
},
{
"epoch": 0.19047619047619047,
"grad_norm": 0.5374695658683777,
"learning_rate": 4.999734659311921e-05,
"loss": 0.095,
"step": 68
},
{
"epoch": 0.19327731092436976,
"grad_norm": 0.28596481680870056,
"learning_rate": 4.9996641797696206e-05,
"loss": 0.1703,
"step": 69
},
{
"epoch": 0.19607843137254902,
"grad_norm": 0.3090248703956604,
"learning_rate": 4.999585409300281e-05,
"loss": 0.1358,
"step": 70
},
{
"epoch": 0.19887955182072828,
"grad_norm": 0.3411775827407837,
"learning_rate": 4.999498348165169e-05,
"loss": 0.1323,
"step": 71
},
{
"epoch": 0.20168067226890757,
"grad_norm": 0.32754117250442505,
"learning_rate": 4.999402996653051e-05,
"loss": 0.1324,
"step": 72
},
{
"epoch": 0.20448179271708683,
"grad_norm": 0.3174399137496948,
"learning_rate": 4.9992993550801905e-05,
"loss": 0.1304,
"step": 73
},
{
"epoch": 0.20728291316526612,
"grad_norm": 0.286386102437973,
"learning_rate": 4.999187423790347e-05,
"loss": 0.1289,
"step": 74
},
{
"epoch": 0.21008403361344538,
"grad_norm": 0.6507259607315063,
"learning_rate": 4.999067203154777e-05,
"loss": 0.2545,
"step": 75
},
{
"epoch": 0.21288515406162464,
"grad_norm": 0.2852478623390198,
"learning_rate": 4.998938693572229e-05,
"loss": 0.1309,
"step": 76
},
{
"epoch": 0.21568627450980393,
"grad_norm": 0.2769582271575928,
"learning_rate": 4.9988018954689465e-05,
"loss": 0.1694,
"step": 77
},
{
"epoch": 0.2184873949579832,
"grad_norm": 0.3016033172607422,
"learning_rate": 4.998656809298663e-05,
"loss": 0.1752,
"step": 78
},
{
"epoch": 0.22128851540616246,
"grad_norm": 0.2839262783527374,
"learning_rate": 4.998503435542604e-05,
"loss": 0.173,
"step": 79
},
{
"epoch": 0.22408963585434175,
"grad_norm": 0.4902794659137726,
"learning_rate": 4.9983417747094816e-05,
"loss": 0.0811,
"step": 80
},
{
"epoch": 0.226890756302521,
"grad_norm": 0.26926159858703613,
"learning_rate": 4.998171827335494e-05,
"loss": 0.1241,
"step": 81
},
{
"epoch": 0.22969187675070027,
"grad_norm": 0.2879544198513031,
"learning_rate": 4.997993593984327e-05,
"loss": 0.1698,
"step": 82
},
{
"epoch": 0.23249299719887956,
"grad_norm": 0.4781981110572815,
"learning_rate": 4.997807075247146e-05,
"loss": 0.0775,
"step": 83
},
{
"epoch": 0.23529411764705882,
"grad_norm": 0.5029699206352234,
"learning_rate": 4.997612271742601e-05,
"loss": 0.215,
"step": 84
},
{
"epoch": 0.23809523809523808,
"grad_norm": 0.27541235089302063,
"learning_rate": 4.9974091841168195e-05,
"loss": 0.1675,
"step": 85
},
{
"epoch": 0.24089635854341737,
"grad_norm": 0.25892165303230286,
"learning_rate": 4.997197813043404e-05,
"loss": 0.1221,
"step": 86
},
{
"epoch": 0.24369747899159663,
"grad_norm": 0.47153759002685547,
"learning_rate": 4.996978159223436e-05,
"loss": 0.0734,
"step": 87
},
{
"epoch": 0.24649859943977592,
"grad_norm": 0.3439052402973175,
"learning_rate": 4.9967502233854654e-05,
"loss": 0.1698,
"step": 88
},
{
"epoch": 0.24929971988795518,
"grad_norm": 1.2146350145339966,
"learning_rate": 4.996514006285514e-05,
"loss": 0.3275,
"step": 89
},
{
"epoch": 0.25210084033613445,
"grad_norm": 0.5989933013916016,
"learning_rate": 4.99626950870707e-05,
"loss": 0.2192,
"step": 90
},
{
"epoch": 0.2549019607843137,
"grad_norm": 0.481522798538208,
"learning_rate": 4.996016731461088e-05,
"loss": 0.0737,
"step": 91
},
{
"epoch": 0.25770308123249297,
"grad_norm": 0.8134711384773254,
"learning_rate": 4.995755675385982e-05,
"loss": 0.2394,
"step": 92
},
{
"epoch": 0.2605042016806723,
"grad_norm": 0.3162464201450348,
"learning_rate": 4.995486341347629e-05,
"loss": 0.1734,
"step": 93
},
{
"epoch": 0.26330532212885155,
"grad_norm": 1.2472445964813232,
"learning_rate": 4.99520873023936e-05,
"loss": 0.331,
"step": 94
},
{
"epoch": 0.2661064425770308,
"grad_norm": 0.4896290600299835,
"learning_rate": 4.994922842981958e-05,
"loss": 0.0775,
"step": 95
},
{
"epoch": 0.2689075630252101,
"grad_norm": 0.6393519043922424,
"learning_rate": 4.9946286805236616e-05,
"loss": 0.2254,
"step": 96
},
{
"epoch": 0.27170868347338933,
"grad_norm": 0.3101232945919037,
"learning_rate": 4.994326243840153e-05,
"loss": 0.1741,
"step": 97
},
{
"epoch": 0.27450980392156865,
"grad_norm": 0.396138995885849,
"learning_rate": 4.994015533934557e-05,
"loss": 0.2115,
"step": 98
},
{
"epoch": 0.2773109243697479,
"grad_norm": 0.4914068877696991,
"learning_rate": 4.993696551837444e-05,
"loss": 0.2207,
"step": 99
},
{
"epoch": 0.2801120448179272,
"grad_norm": 0.31758832931518555,
"learning_rate": 4.9933692986068165e-05,
"loss": 0.1266,
"step": 100
},
{
"epoch": 0.28291316526610644,
"grad_norm": 0.24751000106334686,
"learning_rate": 4.993033775328115e-05,
"loss": 0.1694,
"step": 101
},
{
"epoch": 0.2857142857142857,
"grad_norm": 0.2383459210395813,
"learning_rate": 4.992689983114209e-05,
"loss": 0.17,
"step": 102
},
{
"epoch": 0.28851540616246496,
"grad_norm": 0.3043301999568939,
"learning_rate": 4.9923379231053925e-05,
"loss": 0.1747,
"step": 103
},
{
"epoch": 0.2913165266106443,
"grad_norm": 0.311767041683197,
"learning_rate": 4.9919775964693854e-05,
"loss": 0.1345,
"step": 104
},
{
"epoch": 0.29411764705882354,
"grad_norm": 0.9271151423454285,
"learning_rate": 4.991609004401324e-05,
"loss": 0.3113,
"step": 105
},
{
"epoch": 0.2969187675070028,
"grad_norm": 0.24133501946926117,
"learning_rate": 4.991232148123761e-05,
"loss": 0.1685,
"step": 106
},
{
"epoch": 0.29971988795518206,
"grad_norm": 0.7193834781646729,
"learning_rate": 4.99084702888666e-05,
"loss": 0.2805,
"step": 107
},
{
"epoch": 0.3025210084033613,
"grad_norm": 0.21335840225219727,
"learning_rate": 4.990453647967389e-05,
"loss": 0.1717,
"step": 108
},
{
"epoch": 0.30532212885154064,
"grad_norm": 0.27446791529655457,
"learning_rate": 4.9900520066707215e-05,
"loss": 0.2081,
"step": 109
},
{
"epoch": 0.3081232492997199,
"grad_norm": 0.5605149269104004,
"learning_rate": 4.9896421063288286e-05,
"loss": 0.1085,
"step": 110
},
{
"epoch": 0.31092436974789917,
"grad_norm": 0.2565538287162781,
"learning_rate": 4.989223948301273e-05,
"loss": 0.1717,
"step": 111
},
{
"epoch": 0.3137254901960784,
"grad_norm": 0.5685631632804871,
"learning_rate": 4.988797533975009e-05,
"loss": 0.1095,
"step": 112
},
{
"epoch": 0.3165266106442577,
"grad_norm": 0.25811776518821716,
"learning_rate": 4.9883628647643744e-05,
"loss": 0.173,
"step": 113
},
{
"epoch": 0.31932773109243695,
"grad_norm": 0.3600546717643738,
"learning_rate": 4.9879199421110865e-05,
"loss": 0.1432,
"step": 114
},
{
"epoch": 0.32212885154061627,
"grad_norm": 0.22939907014369965,
"learning_rate": 4.98746876748424e-05,
"loss": 0.2009,
"step": 115
},
{
"epoch": 0.32492997198879553,
"grad_norm": 0.3546235263347626,
"learning_rate": 4.9870093423802964e-05,
"loss": 0.1419,
"step": 116
},
{
"epoch": 0.3277310924369748,
"grad_norm": 0.2727724015712738,
"learning_rate": 4.986541668323086e-05,
"loss": 0.2094,
"step": 117
},
{
"epoch": 0.33053221288515405,
"grad_norm": 0.2383224368095398,
"learning_rate": 4.986065746863797e-05,
"loss": 0.1735,
"step": 118
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.23489497601985931,
"learning_rate": 4.985581579580973e-05,
"loss": 0.1738,
"step": 119
},
{
"epoch": 0.33613445378151263,
"grad_norm": 0.3256724178791046,
"learning_rate": 4.985089168080509e-05,
"loss": 0.1336,
"step": 120
},
{
"epoch": 0.3389355742296919,
"grad_norm": 0.2290939837694168,
"learning_rate": 4.9845885139956435e-05,
"loss": 0.1732,
"step": 121
},
{
"epoch": 0.34173669467787116,
"grad_norm": 0.2946733236312866,
"learning_rate": 4.984079618986953e-05,
"loss": 0.1377,
"step": 122
},
{
"epoch": 0.3445378151260504,
"grad_norm": 0.9020264744758606,
"learning_rate": 4.983562484742349e-05,
"loss": 0.3229,
"step": 123
},
{
"epoch": 0.3473389355742297,
"grad_norm": 0.2107078731060028,
"learning_rate": 4.983037112977072e-05,
"loss": 0.1738,
"step": 124
},
{
"epoch": 0.35014005602240894,
"grad_norm": 0.21474942564964294,
"learning_rate": 4.982503505433683e-05,
"loss": 0.1698,
"step": 125
},
{
"epoch": 0.35294117647058826,
"grad_norm": 0.3706744909286499,
"learning_rate": 4.98196166388206e-05,
"loss": 0.2061,
"step": 126
},
{
"epoch": 0.3557422969187675,
"grad_norm": 0.19645719230175018,
"learning_rate": 4.981411590119392e-05,
"loss": 0.1687,
"step": 127
},
{
"epoch": 0.3585434173669468,
"grad_norm": 0.4788144528865814,
"learning_rate": 4.980853285970173e-05,
"loss": 0.097,
"step": 128
},
{
"epoch": 0.36134453781512604,
"grad_norm": 0.30618858337402344,
"learning_rate": 4.980286753286195e-05,
"loss": 0.1367,
"step": 129
},
{
"epoch": 0.3641456582633053,
"grad_norm": 0.5136687159538269,
"learning_rate": 4.979711993946543e-05,
"loss": 0.2502,
"step": 130
},
{
"epoch": 0.36694677871148457,
"grad_norm": 0.38218584656715393,
"learning_rate": 4.9791290098575876e-05,
"loss": 0.2166,
"step": 131
},
{
"epoch": 0.3697478991596639,
"grad_norm": 0.3716464936733246,
"learning_rate": 4.978537802952981e-05,
"loss": 0.2071,
"step": 132
},
{
"epoch": 0.37254901960784315,
"grad_norm": 0.21404078602790833,
"learning_rate": 4.977938375193648e-05,
"loss": 0.177,
"step": 133
},
{
"epoch": 0.3753501400560224,
"grad_norm": 0.3054008483886719,
"learning_rate": 4.9773307285677785e-05,
"loss": 0.212,
"step": 134
},
{
"epoch": 0.37815126050420167,
"grad_norm": 0.7401143312454224,
"learning_rate": 4.976714865090827e-05,
"loss": 0.278,
"step": 135
},
{
"epoch": 0.38095238095238093,
"grad_norm": 0.3005881607532501,
"learning_rate": 4.976090786805498e-05,
"loss": 0.2057,
"step": 136
},
{
"epoch": 0.38375350140056025,
"grad_norm": 1.065142035484314,
"learning_rate": 4.975458495781746e-05,
"loss": 0.3504,
"step": 137
},
{
"epoch": 0.3865546218487395,
"grad_norm": 0.291591614484787,
"learning_rate": 4.974817994116764e-05,
"loss": 0.2105,
"step": 138
},
{
"epoch": 0.38935574229691877,
"grad_norm": 0.21302799880504608,
"learning_rate": 4.9741692839349765e-05,
"loss": 0.1722,
"step": 139
},
{
"epoch": 0.39215686274509803,
"grad_norm": 0.3730943500995636,
"learning_rate": 4.973512367388038e-05,
"loss": 0.2368,
"step": 140
},
{
"epoch": 0.3949579831932773,
"grad_norm": 0.25385361909866333,
"learning_rate": 4.9728472466548194e-05,
"loss": 0.2002,
"step": 141
},
{
"epoch": 0.39775910364145656,
"grad_norm": 0.25864890217781067,
"learning_rate": 4.9721739239414034e-05,
"loss": 0.1719,
"step": 142
},
{
"epoch": 0.4005602240896359,
"grad_norm": 0.44104957580566406,
"learning_rate": 4.971492401481079e-05,
"loss": 0.1467,
"step": 143
},
{
"epoch": 0.40336134453781514,
"grad_norm": 0.6197791695594788,
"learning_rate": 4.9708026815343314e-05,
"loss": 0.1173,
"step": 144
},
{
"epoch": 0.4061624649859944,
"grad_norm": 0.6132116317749023,
"learning_rate": 4.970104766388832e-05,
"loss": 0.1187,
"step": 145
},
{
"epoch": 0.40896358543417366,
"grad_norm": 0.28150931000709534,
"learning_rate": 4.969398658359441e-05,
"loss": 0.1739,
"step": 146
},
{
"epoch": 0.4117647058823529,
"grad_norm": 0.41110119223594666,
"learning_rate": 4.968684359788187e-05,
"loss": 0.1441,
"step": 147
},
{
"epoch": 0.41456582633053224,
"grad_norm": 0.2573298513889313,
"learning_rate": 4.967961873044267e-05,
"loss": 0.2043,
"step": 148
},
{
"epoch": 0.4173669467787115,
"grad_norm": 0.5403711199760437,
"learning_rate": 4.967231200524037e-05,
"loss": 0.1091,
"step": 149
},
{
"epoch": 0.42016806722689076,
"grad_norm": 0.25525861978530884,
"learning_rate": 4.966492344651005e-05,
"loss": 0.2077,
"step": 150
},
{
"epoch": 0.42296918767507,
"grad_norm": 0.2501123547554016,
"learning_rate": 4.965745307875819e-05,
"loss": 0.2061,
"step": 151
},
{
"epoch": 0.4257703081232493,
"grad_norm": 0.2235931009054184,
"learning_rate": 4.964990092676263e-05,
"loss": 0.1766,
"step": 152
},
{
"epoch": 0.42857142857142855,
"grad_norm": 0.3031710386276245,
"learning_rate": 4.9642267015572464e-05,
"loss": 0.1371,
"step": 153
},
{
"epoch": 0.43137254901960786,
"grad_norm": 0.48257577419281006,
"learning_rate": 4.9634551370507986e-05,
"loss": 0.0987,
"step": 154
},
{
"epoch": 0.4341736694677871,
"grad_norm": 0.254314124584198,
"learning_rate": 4.962675401716056e-05,
"loss": 0.1627,
"step": 155
},
{
"epoch": 0.4369747899159664,
"grad_norm": 0.46130886673927307,
"learning_rate": 4.9618874981392596e-05,
"loss": 0.0952,
"step": 156
},
{
"epoch": 0.43977591036414565,
"grad_norm": 0.48739495873451233,
"learning_rate": 4.961091428933738e-05,
"loss": 0.2433,
"step": 157
},
{
"epoch": 0.4425770308123249,
"grad_norm": 0.3604317307472229,
"learning_rate": 4.96028719673991e-05,
"loss": 0.2135,
"step": 158
},
{
"epoch": 0.44537815126050423,
"grad_norm": 0.31823596358299255,
"learning_rate": 4.959474804225263e-05,
"loss": 0.209,
"step": 159
},
{
"epoch": 0.4481792717086835,
"grad_norm": 0.4376252591609955,
"learning_rate": 4.958654254084355e-05,
"loss": 0.0907,
"step": 160
},
{
"epoch": 0.45098039215686275,
"grad_norm": 0.5917844772338867,
"learning_rate": 4.9578255490388007e-05,
"loss": 0.2649,
"step": 161
},
{
"epoch": 0.453781512605042,
"grad_norm": 0.33242353796958923,
"learning_rate": 4.956988691837262e-05,
"loss": 0.2071,
"step": 162
},
{
"epoch": 0.4565826330532213,
"grad_norm": 0.19680732488632202,
"learning_rate": 4.956143685255441e-05,
"loss": 0.1706,
"step": 163
},
{
"epoch": 0.45938375350140054,
"grad_norm": 0.19730661809444427,
"learning_rate": 4.955290532096068e-05,
"loss": 0.1656,
"step": 164
},
{
"epoch": 0.46218487394957986,
"grad_norm": 0.24366319179534912,
"learning_rate": 4.9544292351888966e-05,
"loss": 0.129,
"step": 165
},
{
"epoch": 0.4649859943977591,
"grad_norm": 0.20081248879432678,
"learning_rate": 4.95355979739069e-05,
"loss": 0.1659,
"step": 166
},
{
"epoch": 0.4677871148459384,
"grad_norm": 0.24853447079658508,
"learning_rate": 4.9526822215852145e-05,
"loss": 0.1797,
"step": 167
},
{
"epoch": 0.47058823529411764,
"grad_norm": 0.3966836929321289,
"learning_rate": 4.951796510683227e-05,
"loss": 0.2161,
"step": 168
},
{
"epoch": 0.4733893557422969,
"grad_norm": 1.1678446531295776,
"learning_rate": 4.950902667622468e-05,
"loss": 0.3802,
"step": 169
},
{
"epoch": 0.47619047619047616,
"grad_norm": 0.4717535674571991,
"learning_rate": 4.95000069536765e-05,
"loss": 0.222,
"step": 170
},
{
"epoch": 0.4789915966386555,
"grad_norm": 0.2482030987739563,
"learning_rate": 4.9490905969104514e-05,
"loss": 0.1356,
"step": 171
},
{
"epoch": 0.48179271708683474,
"grad_norm": 0.20962095260620117,
"learning_rate": 4.9481723752695006e-05,
"loss": 0.1644,
"step": 172
},
{
"epoch": 0.484593837535014,
"grad_norm": 0.46536725759506226,
"learning_rate": 4.9472460334903703e-05,
"loss": 0.2479,
"step": 173
},
{
"epoch": 0.48739495798319327,
"grad_norm": 0.4335561990737915,
"learning_rate": 4.946311574645565e-05,
"loss": 0.0947,
"step": 174
},
{
"epoch": 0.49019607843137253,
"grad_norm": 0.4413890838623047,
"learning_rate": 4.9453690018345144e-05,
"loss": 0.0965,
"step": 175
},
{
"epoch": 0.49299719887955185,
"grad_norm": 0.17996694147586823,
"learning_rate": 4.944418318183559e-05,
"loss": 0.1686,
"step": 176
},
{
"epoch": 0.4957983193277311,
"grad_norm": 0.17904537916183472,
"learning_rate": 4.943459526845942e-05,
"loss": 0.171,
"step": 177
},
{
"epoch": 0.49859943977591037,
"grad_norm": 0.4312843382358551,
"learning_rate": 4.9424926310017975e-05,
"loss": 0.0961,
"step": 178
},
{
"epoch": 0.5014005602240896,
"grad_norm": 0.27155429124832153,
"learning_rate": 4.941517633858141e-05,
"loss": 0.2079,
"step": 179
},
{
"epoch": 0.5042016806722689,
"grad_norm": 0.2025201916694641,
"learning_rate": 4.9405345386488614e-05,
"loss": 0.1742,
"step": 180
},
{
"epoch": 0.5070028011204482,
"grad_norm": 0.20862695574760437,
"learning_rate": 4.939543348634703e-05,
"loss": 0.1693,
"step": 181
},
{
"epoch": 0.5098039215686274,
"grad_norm": 0.43479472398757935,
"learning_rate": 4.9385440671032626e-05,
"loss": 0.0942,
"step": 182
},
{
"epoch": 0.5126050420168067,
"grad_norm": 0.2722318470478058,
"learning_rate": 4.937536697368971e-05,
"loss": 0.2083,
"step": 183
},
{
"epoch": 0.5154061624649859,
"grad_norm": 0.4572315216064453,
"learning_rate": 4.936521242773091e-05,
"loss": 0.2492,
"step": 184
},
{
"epoch": 0.5182072829131653,
"grad_norm": 0.5211023092269897,
"learning_rate": 4.9354977066836986e-05,
"loss": 0.2559,
"step": 185
},
{
"epoch": 0.5210084033613446,
"grad_norm": 0.4220978319644928,
"learning_rate": 4.934466092495673e-05,
"loss": 0.0944,
"step": 186
},
{
"epoch": 0.5238095238095238,
"grad_norm": 0.25524213910102844,
"learning_rate": 4.9334264036306916e-05,
"loss": 0.1332,
"step": 187
},
{
"epoch": 0.5266106442577031,
"grad_norm": 0.1635034680366516,
"learning_rate": 4.93237864353721e-05,
"loss": 0.1691,
"step": 188
},
{
"epoch": 0.5294117647058824,
"grad_norm": 0.1833125352859497,
"learning_rate": 4.931322815690457e-05,
"loss": 0.172,
"step": 189
},
{
"epoch": 0.5322128851540616,
"grad_norm": 0.22643063962459564,
"learning_rate": 4.930258923592418e-05,
"loss": 0.1768,
"step": 190
},
{
"epoch": 0.5350140056022409,
"grad_norm": 0.4311389923095703,
"learning_rate": 4.9291869707718304e-05,
"loss": 0.094,
"step": 191
},
{
"epoch": 0.5378151260504201,
"grad_norm": 0.41472041606903076,
"learning_rate": 4.9281069607841624e-05,
"loss": 0.0918,
"step": 192
},
{
"epoch": 0.5406162464985994,
"grad_norm": 0.19142650067806244,
"learning_rate": 4.927018897211609e-05,
"loss": 0.1737,
"step": 193
},
{
"epoch": 0.5434173669467787,
"grad_norm": 0.27896803617477417,
"learning_rate": 4.925922783663079e-05,
"loss": 0.2096,
"step": 194
},
{
"epoch": 0.5462184873949579,
"grad_norm": 0.2846761643886566,
"learning_rate": 4.924818623774178e-05,
"loss": 0.1254,
"step": 195
},
{
"epoch": 0.5490196078431373,
"grad_norm": 0.1776442974805832,
"learning_rate": 4.923706421207202e-05,
"loss": 0.1715,
"step": 196
},
{
"epoch": 0.5518207282913166,
"grad_norm": 0.2530427873134613,
"learning_rate": 4.922586179651124e-05,
"loss": 0.1286,
"step": 197
},
{
"epoch": 0.5546218487394958,
"grad_norm": 0.3111589550971985,
"learning_rate": 4.9214579028215776e-05,
"loss": 0.209,
"step": 198
},
{
"epoch": 0.5574229691876751,
"grad_norm": 0.4025686979293823,
"learning_rate": 4.920321594460852e-05,
"loss": 0.0844,
"step": 199
},
{
"epoch": 0.5602240896358543,
"grad_norm": 0.4007948040962219,
"learning_rate": 4.9191772583378705e-05,
"loss": 0.0839,
"step": 200
},
{
"epoch": 0.5630252100840336,
"grad_norm": 0.24552220106124878,
"learning_rate": 4.9180248982481876e-05,
"loss": 0.1228,
"step": 201
},
{
"epoch": 0.5658263305322129,
"grad_norm": 0.3994346261024475,
"learning_rate": 4.916864518013971e-05,
"loss": 0.0792,
"step": 202
},
{
"epoch": 0.5686274509803921,
"grad_norm": 0.24182085692882538,
"learning_rate": 4.915696121483986e-05,
"loss": 0.1216,
"step": 203
},
{
"epoch": 0.5714285714285714,
"grad_norm": 0.6845493912696838,
"learning_rate": 4.9145197125335916e-05,
"loss": 0.271,
"step": 204
},
{
"epoch": 0.5742296918767507,
"grad_norm": 0.35140976309776306,
"learning_rate": 4.91333529506472e-05,
"loss": 0.1824,
"step": 205
},
{
"epoch": 0.5770308123249299,
"grad_norm": 0.2534499168395996,
"learning_rate": 4.912142873005866e-05,
"loss": 0.1623,
"step": 206
},
{
"epoch": 0.5798319327731093,
"grad_norm": 0.21057064831256866,
"learning_rate": 4.910942450312075e-05,
"loss": 0.1185,
"step": 207
},
{
"epoch": 0.5826330532212886,
"grad_norm": 0.2223557084798813,
"learning_rate": 4.909734030964929e-05,
"loss": 0.1259,
"step": 208
},
{
"epoch": 0.5854341736694678,
"grad_norm": 0.2970251739025116,
"learning_rate": 4.9085176189725314e-05,
"loss": 0.1735,
"step": 209
},
{
"epoch": 0.5882352941176471,
"grad_norm": 0.8583146929740906,
"learning_rate": 4.907293218369499e-05,
"loss": 0.2865,
"step": 210
},
{
"epoch": 0.5910364145658263,
"grad_norm": 0.2618134617805481,
"learning_rate": 4.906060833216942e-05,
"loss": 0.1733,
"step": 211
},
{
"epoch": 0.5938375350140056,
"grad_norm": 0.3876262605190277,
"learning_rate": 4.904820467602458e-05,
"loss": 0.0753,
"step": 212
},
{
"epoch": 0.5966386554621849,
"grad_norm": 0.7885448932647705,
"learning_rate": 4.90357212564011e-05,
"loss": 0.2769,
"step": 213
},
{
"epoch": 0.5994397759103641,
"grad_norm": 0.44926995038986206,
"learning_rate": 4.9023158114704206e-05,
"loss": 0.2188,
"step": 214
},
{
"epoch": 0.6022408963585434,
"grad_norm": 1.0731271505355835,
"learning_rate": 4.901051529260352e-05,
"loss": 0.3633,
"step": 215
},
{
"epoch": 0.6050420168067226,
"grad_norm": 0.38666459918022156,
"learning_rate": 4.899779283203296e-05,
"loss": 0.0786,
"step": 216
},
{
"epoch": 0.6078431372549019,
"grad_norm": 0.40130844712257385,
"learning_rate": 4.89849907751906e-05,
"loss": 0.2174,
"step": 217
},
{
"epoch": 0.6106442577030813,
"grad_norm": 0.22078561782836914,
"learning_rate": 4.897210916453851e-05,
"loss": 0.131,
"step": 218
},
{
"epoch": 0.6134453781512605,
"grad_norm": 0.3916260600090027,
"learning_rate": 4.895914804280262e-05,
"loss": 0.0827,
"step": 219
},
{
"epoch": 0.6162464985994398,
"grad_norm": 0.5899614095687866,
"learning_rate": 4.89461074529726e-05,
"loss": 0.2647,
"step": 220
},
{
"epoch": 0.6190476190476191,
"grad_norm": 0.37688153982162476,
"learning_rate": 4.893298743830168e-05,
"loss": 0.2185,
"step": 221
},
{
"epoch": 0.6218487394957983,
"grad_norm": 0.19744780659675598,
"learning_rate": 4.891978804230656e-05,
"loss": 0.1755,
"step": 222
},
{
"epoch": 0.6246498599439776,
"grad_norm": 0.23267637193202972,
"learning_rate": 4.890650930876719e-05,
"loss": 0.1316,
"step": 223
},
{
"epoch": 0.6274509803921569,
"grad_norm": 0.1993289738893509,
"learning_rate": 4.889315128172669e-05,
"loss": 0.1755,
"step": 224
},
{
"epoch": 0.6302521008403361,
"grad_norm": 0.22977744042873383,
"learning_rate": 4.88797140054912e-05,
"loss": 0.1308,
"step": 225
},
{
"epoch": 0.6330532212885154,
"grad_norm": 0.3395256996154785,
"learning_rate": 4.88661975246297e-05,
"loss": 0.2184,
"step": 226
},
{
"epoch": 0.6358543417366946,
"grad_norm": 0.276090145111084,
"learning_rate": 4.8852601883973846e-05,
"loss": 0.1294,
"step": 227
},
{
"epoch": 0.6386554621848739,
"grad_norm": 0.2874520421028137,
"learning_rate": 4.883892712861791e-05,
"loss": 0.2176,
"step": 228
},
{
"epoch": 0.6414565826330533,
"grad_norm": 0.3347153663635254,
"learning_rate": 4.882517330391854e-05,
"loss": 0.2182,
"step": 229
},
{
"epoch": 0.6442577030812325,
"grad_norm": 0.40832483768463135,
"learning_rate": 4.8811340455494624e-05,
"loss": 0.0955,
"step": 230
},
{
"epoch": 0.6470588235294118,
"grad_norm": 0.2539953589439392,
"learning_rate": 4.879742862922721e-05,
"loss": 0.1308,
"step": 231
},
{
"epoch": 0.6498599439775911,
"grad_norm": 0.26753100752830505,
"learning_rate": 4.8783437871259254e-05,
"loss": 0.2096,
"step": 232
},
{
"epoch": 0.6526610644257703,
"grad_norm": 0.16798768937587738,
"learning_rate": 4.876936822799553e-05,
"loss": 0.1713,
"step": 233
},
{
"epoch": 0.6554621848739496,
"grad_norm": 0.2817080020904541,
"learning_rate": 4.875521974610247e-05,
"loss": 0.2118,
"step": 234
},
{
"epoch": 0.6582633053221288,
"grad_norm": 0.40791934728622437,
"learning_rate": 4.874099247250798e-05,
"loss": 0.0928,
"step": 235
},
{
"epoch": 0.6610644257703081,
"grad_norm": 0.24818055331707,
"learning_rate": 4.8726686454401325e-05,
"loss": 0.2097,
"step": 236
},
{
"epoch": 0.6638655462184874,
"grad_norm": 0.420173317193985,
"learning_rate": 4.8712301739232935e-05,
"loss": 0.2445,
"step": 237
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.2433900535106659,
"learning_rate": 4.869783837471427e-05,
"loss": 0.1331,
"step": 238
},
{
"epoch": 0.6694677871148459,
"grad_norm": 0.3094242811203003,
"learning_rate": 4.8683296408817644e-05,
"loss": 0.2125,
"step": 239
},
{
"epoch": 0.6722689075630253,
"grad_norm": 0.24864792823791504,
"learning_rate": 4.8668675889776095e-05,
"loss": 0.1326,
"step": 240
},
{
"epoch": 0.6750700280112045,
"grad_norm": 0.16234812140464783,
"learning_rate": 4.8653976866083206e-05,
"loss": 0.17,
"step": 241
},
{
"epoch": 0.6778711484593838,
"grad_norm": 0.30067870020866394,
"learning_rate": 4.863919938649293e-05,
"loss": 0.206,
"step": 242
},
{
"epoch": 0.680672268907563,
"grad_norm": 0.43154123425483704,
"learning_rate": 4.862434350001945e-05,
"loss": 0.2505,
"step": 243
},
{
"epoch": 0.6834733893557423,
"grad_norm": 0.4572393596172333,
"learning_rate": 4.860940925593703e-05,
"loss": 0.2417,
"step": 244
},
{
"epoch": 0.6862745098039216,
"grad_norm": 0.2604037821292877,
"learning_rate": 4.85943967037798e-05,
"loss": 0.1362,
"step": 245
},
{
"epoch": 0.6890756302521008,
"grad_norm": 0.2577713429927826,
"learning_rate": 4.857930589334164e-05,
"loss": 0.1339,
"step": 246
},
{
"epoch": 0.6918767507002801,
"grad_norm": 0.17431576550006866,
"learning_rate": 4.8564136874676e-05,
"loss": 0.1719,
"step": 247
},
{
"epoch": 0.6946778711484594,
"grad_norm": 0.271586537361145,
"learning_rate": 4.854888969809573e-05,
"loss": 0.1349,
"step": 248
},
{
"epoch": 0.6974789915966386,
"grad_norm": 0.2800721824169159,
"learning_rate": 4.8533564414172915e-05,
"loss": 0.1354,
"step": 249
},
{
"epoch": 0.7002801120448179,
"grad_norm": 0.388351172208786,
"learning_rate": 4.851816107373871e-05,
"loss": 0.2419,
"step": 250
},
{
"epoch": 0.7030812324929971,
"grad_norm": 0.2688753604888916,
"learning_rate": 4.850267972788316e-05,
"loss": 0.1368,
"step": 251
},
{
"epoch": 0.7058823529411765,
"grad_norm": 0.43289467692375183,
"learning_rate": 4.848712042795505e-05,
"loss": 0.0977,
"step": 252
},
{
"epoch": 0.7086834733893558,
"grad_norm": 0.3105364739894867,
"learning_rate": 4.847148322556171e-05,
"loss": 0.218,
"step": 253
},
{
"epoch": 0.711484593837535,
"grad_norm": 0.44322580099105835,
"learning_rate": 4.8455768172568886e-05,
"loss": 0.2419,
"step": 254
},
{
"epoch": 0.7142857142857143,
"grad_norm": 0.8140190839767456,
"learning_rate": 4.843997532110051e-05,
"loss": 0.3223,
"step": 255
},
{
"epoch": 0.7170868347338936,
"grad_norm": 0.26502156257629395,
"learning_rate": 4.842410472353858e-05,
"loss": 0.1313,
"step": 256
},
{
"epoch": 0.7198879551820728,
"grad_norm": 0.8722953200340271,
"learning_rate": 4.840815643252294e-05,
"loss": 0.3257,
"step": 257
},
{
"epoch": 0.7226890756302521,
"grad_norm": 0.2759450376033783,
"learning_rate": 4.839213050095116e-05,
"loss": 0.1385,
"step": 258
},
{
"epoch": 0.7254901960784313,
"grad_norm": 0.17727164924144745,
"learning_rate": 4.83760269819783e-05,
"loss": 0.1713,
"step": 259
},
{
"epoch": 0.7282913165266106,
"grad_norm": 0.20785532891750336,
"learning_rate": 4.835984592901678e-05,
"loss": 0.1776,
"step": 260
},
{
"epoch": 0.7310924369747899,
"grad_norm": 0.5836495757102966,
"learning_rate": 4.834358739573618e-05,
"loss": 0.2779,
"step": 261
},
{
"epoch": 0.7338935574229691,
"grad_norm": 0.33494001626968384,
"learning_rate": 4.8327251436063064e-05,
"loss": 0.2319,
"step": 262
},
{
"epoch": 0.7366946778711485,
"grad_norm": 0.29367175698280334,
"learning_rate": 4.831083810418082e-05,
"loss": 0.1383,
"step": 263
},
{
"epoch": 0.7394957983193278,
"grad_norm": 0.31937041878700256,
"learning_rate": 4.8294347454529445e-05,
"loss": 0.1407,
"step": 264
},
{
"epoch": 0.742296918767507,
"grad_norm": 0.48022958636283875,
"learning_rate": 4.82777795418054e-05,
"loss": 0.1067,
"step": 265
},
{
"epoch": 0.7450980392156863,
"grad_norm": 0.48949047923088074,
"learning_rate": 4.826113442096141e-05,
"loss": 0.1079,
"step": 266
},
{
"epoch": 0.7478991596638656,
"grad_norm": 0.5554994344711304,
"learning_rate": 4.8244412147206284e-05,
"loss": 0.2744,
"step": 267
},
{
"epoch": 0.7507002801120448,
"grad_norm": 0.22763153910636902,
"learning_rate": 4.822761277600474e-05,
"loss": 0.2015,
"step": 268
},
{
"epoch": 0.7535014005602241,
"grad_norm": 0.45823562145233154,
"learning_rate": 4.821073636307719e-05,
"loss": 0.1057,
"step": 269
},
{
"epoch": 0.7563025210084033,
"grad_norm": 0.21038657426834106,
"learning_rate": 4.819378296439961e-05,
"loss": 0.1802,
"step": 270
},
{
"epoch": 0.7591036414565826,
"grad_norm": 0.21373850107192993,
"learning_rate": 4.8176752636203314e-05,
"loss": 0.1706,
"step": 271
},
{
"epoch": 0.7619047619047619,
"grad_norm": 0.2585920989513397,
"learning_rate": 4.815964543497476e-05,
"loss": 0.2091,
"step": 272
},
{
"epoch": 0.7647058823529411,
"grad_norm": 0.4250545799732208,
"learning_rate": 4.81424614174554e-05,
"loss": 0.2504,
"step": 273
},
{
"epoch": 0.7675070028011205,
"grad_norm": 0.350320041179657,
"learning_rate": 4.8125200640641455e-05,
"loss": 0.2373,
"step": 274
},
{
"epoch": 0.7703081232492998,
"grad_norm": 0.2049403339624405,
"learning_rate": 4.8107863161783773e-05,
"loss": 0.1697,
"step": 275
},
{
"epoch": 0.773109243697479,
"grad_norm": 0.20530298352241516,
"learning_rate": 4.8090449038387564e-05,
"loss": 0.1794,
"step": 276
},
{
"epoch": 0.7759103641456583,
"grad_norm": 0.18878710269927979,
"learning_rate": 4.8072958328212294e-05,
"loss": 0.1672,
"step": 277
},
{
"epoch": 0.7787114845938375,
"grad_norm": 0.23467376828193665,
"learning_rate": 4.805539108927142e-05,
"loss": 0.2107,
"step": 278
},
{
"epoch": 0.7815126050420168,
"grad_norm": 0.3365757465362549,
"learning_rate": 4.8037747379832265e-05,
"loss": 0.1352,
"step": 279
},
{
"epoch": 0.7843137254901961,
"grad_norm": 0.47438693046569824,
"learning_rate": 4.8020027258415764e-05,
"loss": 0.1032,
"step": 280
},
{
"epoch": 0.7871148459383753,
"grad_norm": 0.29125773906707764,
"learning_rate": 4.8002230783796315e-05,
"loss": 0.1374,
"step": 281
},
{
"epoch": 0.7899159663865546,
"grad_norm": 0.597076952457428,
"learning_rate": 4.798435801500154e-05,
"loss": 0.2789,
"step": 282
},
{
"epoch": 0.7927170868347339,
"grad_norm": 0.40489983558654785,
"learning_rate": 4.7966409011312145e-05,
"loss": 0.2436,
"step": 283
},
{
"epoch": 0.7955182072829131,
"grad_norm": 0.2765220105648041,
"learning_rate": 4.7948383832261665e-05,
"loss": 0.1397,
"step": 284
},
{
"epoch": 0.7983193277310925,
"grad_norm": 0.28682276606559753,
"learning_rate": 4.793028253763633e-05,
"loss": 0.1353,
"step": 285
},
{
"epoch": 0.8011204481792717,
"grad_norm": 0.2538602352142334,
"learning_rate": 4.791210518747479e-05,
"loss": 0.2152,
"step": 286
},
{
"epoch": 0.803921568627451,
"grad_norm": 0.2573007643222809,
"learning_rate": 4.7893851842067984e-05,
"loss": 0.2034,
"step": 287
},
{
"epoch": 0.8067226890756303,
"grad_norm": 0.2098461240530014,
"learning_rate": 4.7875522561958906e-05,
"loss": 0.1647,
"step": 288
},
{
"epoch": 0.8095238095238095,
"grad_norm": 0.20104509592056274,
"learning_rate": 4.785711740794242e-05,
"loss": 0.1711,
"step": 289
},
{
"epoch": 0.8123249299719888,
"grad_norm": 0.17355301976203918,
"learning_rate": 4.783863644106502e-05,
"loss": 0.1701,
"step": 290
},
{
"epoch": 0.8151260504201681,
"grad_norm": 0.4620436728000641,
"learning_rate": 4.782007972262471e-05,
"loss": 0.1005,
"step": 291
},
{
"epoch": 0.8179271708683473,
"grad_norm": 0.18848982453346252,
"learning_rate": 4.7801447314170695e-05,
"loss": 0.1764,
"step": 292
},
{
"epoch": 0.8207282913165266,
"grad_norm": 0.2763855457305908,
"learning_rate": 4.7782739277503266e-05,
"loss": 0.1372,
"step": 293
},
{
"epoch": 0.8235294117647058,
"grad_norm": 0.29234346747398376,
"learning_rate": 4.776395567467353e-05,
"loss": 0.1395,
"step": 294
},
{
"epoch": 0.8263305322128851,
"grad_norm": 0.30519282817840576,
"learning_rate": 4.7745096567983256e-05,
"loss": 0.2119,
"step": 295
},
{
"epoch": 0.8291316526610645,
"grad_norm": 0.16128459572792053,
"learning_rate": 4.772616201998464e-05,
"loss": 0.1722,
"step": 296
},
{
"epoch": 0.8319327731092437,
"grad_norm": 0.43482664227485657,
"learning_rate": 4.770715209348009e-05,
"loss": 0.0938,
"step": 297
},
{
"epoch": 0.834733893557423,
"grad_norm": 0.19980540871620178,
"learning_rate": 4.768806685152206e-05,
"loss": 0.1747,
"step": 298
},
{
"epoch": 0.8375350140056023,
"grad_norm": 0.23939190804958344,
"learning_rate": 4.766890635741278e-05,
"loss": 0.1334,
"step": 299
},
{
"epoch": 0.8403361344537815,
"grad_norm": 0.40733805298805237,
"learning_rate": 4.76496706747041e-05,
"loss": 0.0893,
"step": 300
},
{
"epoch": 0.8431372549019608,
"grad_norm": 0.22405779361724854,
"learning_rate": 4.763035986719722e-05,
"loss": 0.1742,
"step": 301
},
{
"epoch": 0.84593837535014,
"grad_norm": 0.23476524651050568,
"learning_rate": 4.761097399894257e-05,
"loss": 0.129,
"step": 302
},
{
"epoch": 0.8487394957983193,
"grad_norm": 0.2294142097234726,
"learning_rate": 4.7591513134239505e-05,
"loss": 0.13,
"step": 303
},
{
"epoch": 0.8515406162464986,
"grad_norm": 0.24955376982688904,
"learning_rate": 4.757197733763615e-05,
"loss": 0.1777,
"step": 304
},
{
"epoch": 0.8543417366946778,
"grad_norm": 0.19835838675498962,
"learning_rate": 4.7552366673929136e-05,
"loss": 0.1683,
"step": 305
},
{
"epoch": 0.8571428571428571,
"grad_norm": 0.2246866673231125,
"learning_rate": 4.7532681208163444e-05,
"loss": 0.167,
"step": 306
},
{
"epoch": 0.8599439775910365,
"grad_norm": 0.2068600207567215,
"learning_rate": 4.751292100563215e-05,
"loss": 0.17,
"step": 307
},
{
"epoch": 0.8627450980392157,
"grad_norm": 0.2158859223127365,
"learning_rate": 4.7493086131876216e-05,
"loss": 0.166,
"step": 308
},
{
"epoch": 0.865546218487395,
"grad_norm": 0.44647109508514404,
"learning_rate": 4.747317665268427e-05,
"loss": 0.2246,
"step": 309
},
{
"epoch": 0.8683473389355743,
"grad_norm": 0.38463255763053894,
"learning_rate": 4.74531926340924e-05,
"loss": 0.0782,
"step": 310
},
{
"epoch": 0.8711484593837535,
"grad_norm": 0.20261278748512268,
"learning_rate": 4.743313414238394e-05,
"loss": 0.1246,
"step": 311
},
{
"epoch": 0.8739495798319328,
"grad_norm": 0.24954895675182343,
"learning_rate": 4.74130012440892e-05,
"loss": 0.1735,
"step": 312
},
{
"epoch": 0.876750700280112,
"grad_norm": 0.3798324167728424,
"learning_rate": 4.7392794005985326e-05,
"loss": 0.0757,
"step": 313
},
{
"epoch": 0.8795518207282913,
"grad_norm": 0.1796898990869522,
"learning_rate": 4.7372512495096e-05,
"loss": 0.1273,
"step": 314
},
{
"epoch": 0.8823529411764706,
"grad_norm": 0.675372838973999,
"learning_rate": 4.735215677869128e-05,
"loss": 0.2697,
"step": 315
},
{
"epoch": 0.8851540616246498,
"grad_norm": 1.0348286628723145,
"learning_rate": 4.733172692428734e-05,
"loss": 0.3294,
"step": 316
},
{
"epoch": 0.8879551820728291,
"grad_norm": 0.22474908828735352,
"learning_rate": 4.731122299964625e-05,
"loss": 0.1263,
"step": 317
},
{
"epoch": 0.8907563025210085,
"grad_norm": 0.4402739107608795,
"learning_rate": 4.7290645072775764e-05,
"loss": 0.2248,
"step": 318
},
{
"epoch": 0.8935574229691877,
"grad_norm": 0.22885151207447052,
"learning_rate": 4.726999321192908e-05,
"loss": 0.1709,
"step": 319
},
{
"epoch": 0.896358543417367,
"grad_norm": 0.1985294669866562,
"learning_rate": 4.7249267485604644e-05,
"loss": 0.1238,
"step": 320
},
{
"epoch": 0.8991596638655462,
"grad_norm": 0.3767232298851013,
"learning_rate": 4.7228467962545866e-05,
"loss": 0.0794,
"step": 321
},
{
"epoch": 0.9019607843137255,
"grad_norm": 0.20609763264656067,
"learning_rate": 4.720759471174096e-05,
"loss": 0.1668,
"step": 322
},
{
"epoch": 0.9047619047619048,
"grad_norm": 0.6048465371131897,
"learning_rate": 4.7186647802422644e-05,
"loss": 0.2622,
"step": 323
},
{
"epoch": 0.907563025210084,
"grad_norm": 0.23643067479133606,
"learning_rate": 4.7165627304068e-05,
"loss": 0.1786,
"step": 324
},
{
"epoch": 0.9103641456582633,
"grad_norm": 0.3819364011287689,
"learning_rate": 4.714453328639814e-05,
"loss": 0.0801,
"step": 325
},
{
"epoch": 0.9131652661064426,
"grad_norm": 0.382179856300354,
"learning_rate": 4.712336581937805e-05,
"loss": 0.0812,
"step": 326
},
{
"epoch": 0.9159663865546218,
"grad_norm": 0.20134811103343964,
"learning_rate": 4.710212497321633e-05,
"loss": 0.1285,
"step": 327
},
{
"epoch": 0.9187675070028011,
"grad_norm": 0.3812258541584015,
"learning_rate": 4.7080810818364974e-05,
"loss": 0.0812,
"step": 328
},
{
"epoch": 0.9215686274509803,
"grad_norm": 0.6299641132354736,
"learning_rate": 4.7059423425519105e-05,
"loss": 0.2613,
"step": 329
},
{
"epoch": 0.9243697478991597,
"grad_norm": 0.20863133668899536,
"learning_rate": 4.703796286561679e-05,
"loss": 0.1234,
"step": 330
},
{
"epoch": 0.927170868347339,
"grad_norm": 0.5638542175292969,
"learning_rate": 4.7016429209838764e-05,
"loss": 0.2572,
"step": 331
},
{
"epoch": 0.9299719887955182,
"grad_norm": 0.8459420800209045,
"learning_rate": 4.6994822529608204e-05,
"loss": 0.31,
"step": 332
},
{
"epoch": 0.9327731092436975,
"grad_norm": 0.20429813861846924,
"learning_rate": 4.697314289659051e-05,
"loss": 0.1265,
"step": 333
},
{
"epoch": 0.9355742296918768,
"grad_norm": 0.5639265179634094,
"learning_rate": 4.695139038269303e-05,
"loss": 0.2573,
"step": 334
},
{
"epoch": 0.938375350140056,
"grad_norm": 0.24481046199798584,
"learning_rate": 4.6929565060064864e-05,
"loss": 0.1265,
"step": 335
},
{
"epoch": 0.9411764705882353,
"grad_norm": 0.2361876666545868,
"learning_rate": 4.690766700109659e-05,
"loss": 0.171,
"step": 336
},
{
"epoch": 0.9439775910364145,
"grad_norm": 0.5476352572441101,
"learning_rate": 4.688569627842007e-05,
"loss": 0.2601,
"step": 337
},
{
"epoch": 0.9467787114845938,
"grad_norm": 0.4084392786026001,
"learning_rate": 4.686365296490813e-05,
"loss": 0.0887,
"step": 338
},
{
"epoch": 0.9495798319327731,
"grad_norm": 0.4765351414680481,
"learning_rate": 4.684153713367442e-05,
"loss": 0.2469,
"step": 339
},
{
"epoch": 0.9523809523809523,
"grad_norm": 0.24255749583244324,
"learning_rate": 4.681934885807308e-05,
"loss": 0.1327,
"step": 340
},
{
"epoch": 0.9551820728291317,
"grad_norm": 0.2432311475276947,
"learning_rate": 4.6797088211698524e-05,
"loss": 0.1349,
"step": 341
},
{
"epoch": 0.957983193277311,
"grad_norm": 0.41482043266296387,
"learning_rate": 4.677475526838526e-05,
"loss": 0.0934,
"step": 342
},
{
"epoch": 0.9607843137254902,
"grad_norm": 0.4962022006511688,
"learning_rate": 4.675235010220754e-05,
"loss": 0.2515,
"step": 343
},
{
"epoch": 0.9635854341736695,
"grad_norm": 0.26970216631889343,
"learning_rate": 4.672987278747919e-05,
"loss": 0.2126,
"step": 344
},
{
"epoch": 0.9663865546218487,
"grad_norm": 0.1891782432794571,
"learning_rate": 4.6707323398753346e-05,
"loss": 0.1656,
"step": 345
},
{
"epoch": 0.969187675070028,
"grad_norm": 0.18780910968780518,
"learning_rate": 4.668470201082218e-05,
"loss": 0.1719,
"step": 346
},
{
"epoch": 0.9719887955182073,
"grad_norm": 0.6354146599769592,
"learning_rate": 4.6662008698716675e-05,
"loss": 0.2906,
"step": 347
},
{
"epoch": 0.9747899159663865,
"grad_norm": 0.19006231427192688,
"learning_rate": 4.663924353770639e-05,
"loss": 0.1676,
"step": 348
},
{
"epoch": 0.9775910364145658,
"grad_norm": 0.1768893450498581,
"learning_rate": 4.6616406603299176e-05,
"loss": 0.1664,
"step": 349
},
{
"epoch": 0.9803921568627451,
"grad_norm": 0.19590170681476593,
"learning_rate": 4.6593497971240956e-05,
"loss": 0.1647,
"step": 350
},
{
"epoch": 0.9831932773109243,
"grad_norm": 0.4365139901638031,
"learning_rate": 4.657051771751546e-05,
"loss": 0.0991,
"step": 351
},
{
"epoch": 0.9859943977591037,
"grad_norm": 0.7374207377433777,
"learning_rate": 4.654746591834396e-05,
"loss": 0.31,
"step": 352
},
{
"epoch": 0.988795518207283,
"grad_norm": 0.2521171271800995,
"learning_rate": 4.652434265018504e-05,
"loss": 0.1356,
"step": 353
},
{
"epoch": 0.9915966386554622,
"grad_norm": 0.2702823281288147,
"learning_rate": 4.6501147989734346e-05,
"loss": 0.2081,
"step": 354
},
{
"epoch": 0.9943977591036415,
"grad_norm": 0.24237488210201263,
"learning_rate": 4.647788201392429e-05,
"loss": 0.175,
"step": 355
},
{
"epoch": 0.9971988795518207,
"grad_norm": 0.2869884669780731,
"learning_rate": 4.645454479992386e-05,
"loss": 0.1384,
"step": 356
},
{
"epoch": 1.0,
"grad_norm": 0.4562254548072815,
"learning_rate": 4.64311364251383e-05,
"loss": 0.1006,
"step": 357
},
{
"epoch": 1.0,
"eval_f1 (minor class)": 0.12357723577235774,
"eval_loss": 0.1737803816795349,
"eval_roc_auc": 0.5386335507088105,
"eval_runtime": 2.9217,
"eval_samples_per_second": 433.989,
"eval_steps_per_second": 13.69,
"step": 357
},
{
"epoch": 1.0028011204481793,
"grad_norm": 0.2548682689666748,
"learning_rate": 4.6407656967208876e-05,
"loss": 0.2014,
"step": 358
},
{
"epoch": 1.0056022408963585,
"grad_norm": 0.2920522689819336,
"learning_rate": 4.638410650401267e-05,
"loss": 0.144,
"step": 359
},
{
"epoch": 1.0084033613445378,
"grad_norm": 0.44480934739112854,
"learning_rate": 4.6360485113662216e-05,
"loss": 0.0998,
"step": 360
},
{
"epoch": 1.011204481792717,
"grad_norm": 0.44798406958580017,
"learning_rate": 4.633679287450534e-05,
"loss": 0.0989,
"step": 361
},
{
"epoch": 1.0140056022408963,
"grad_norm": 0.21220609545707703,
"learning_rate": 4.631302986512485e-05,
"loss": 0.1732,
"step": 362
},
{
"epoch": 1.0168067226890756,
"grad_norm": 0.4394678771495819,
"learning_rate": 4.628919616433827e-05,
"loss": 0.2458,
"step": 363
},
{
"epoch": 1.0196078431372548,
"grad_norm": 0.6233344674110413,
"learning_rate": 4.6265291851197626e-05,
"loss": 0.2756,
"step": 364
},
{
"epoch": 1.022408963585434,
"grad_norm": 0.43582528829574585,
"learning_rate": 4.6241317004989126e-05,
"loss": 0.2477,
"step": 365
},
{
"epoch": 1.0252100840336134,
"grad_norm": 0.22646282613277435,
"learning_rate": 4.621727170523293e-05,
"loss": 0.1612,
"step": 366
},
{
"epoch": 1.0280112044817926,
"grad_norm": 0.2966563403606415,
"learning_rate": 4.619315603168289e-05,
"loss": 0.1287,
"step": 367
},
{
"epoch": 1.0308123249299719,
"grad_norm": 0.45250192284584045,
"learning_rate": 4.6168970064326266e-05,
"loss": 0.0975,
"step": 368
},
{
"epoch": 1.0336134453781514,
"grad_norm": 1.0906392335891724,
"learning_rate": 4.614471388338346e-05,
"loss": 0.3621,
"step": 369
},
{
"epoch": 1.0364145658263306,
"grad_norm": 0.3004130721092224,
"learning_rate": 4.6120387569307775e-05,
"loss": 0.1335,
"step": 370
},
{
"epoch": 1.0392156862745099,
"grad_norm": 0.20164428651332855,
"learning_rate": 4.609599120278514e-05,
"loss": 0.1761,
"step": 371
},
{
"epoch": 1.0420168067226891,
"grad_norm": 0.4512525796890259,
"learning_rate": 4.6071524864733794e-05,
"loss": 0.0978,
"step": 372
},
{
"epoch": 1.0448179271708684,
"grad_norm": 0.2054992914199829,
"learning_rate": 4.604698863630411e-05,
"loss": 0.1603,
"step": 373
},
{
"epoch": 1.0476190476190477,
"grad_norm": 0.27855244278907776,
"learning_rate": 4.602238259887825e-05,
"loss": 0.1278,
"step": 374
},
{
"epoch": 1.050420168067227,
"grad_norm": 0.2679445743560791,
"learning_rate": 4.599770683406991e-05,
"loss": 0.1353,
"step": 375
},
{
"epoch": 1.0532212885154062,
"grad_norm": 0.4758155941963196,
"learning_rate": 4.5972961423724087e-05,
"loss": 0.2309,
"step": 376
},
{
"epoch": 1.0560224089635855,
"grad_norm": 0.4918065071105957,
"learning_rate": 4.594814644991674e-05,
"loss": 0.2486,
"step": 377
},
{
"epoch": 1.0588235294117647,
"grad_norm": 0.45264533162117004,
"learning_rate": 4.592326199495461e-05,
"loss": 0.0964,
"step": 378
},
{
"epoch": 1.061624649859944,
"grad_norm": 0.2610427439212799,
"learning_rate": 4.5898308141374836e-05,
"loss": 0.1328,
"step": 379
},
{
"epoch": 1.0644257703081232,
"grad_norm": 0.6690388917922974,
"learning_rate": 4.5873284971944784e-05,
"loss": 0.2738,
"step": 380
},
{
"epoch": 1.0672268907563025,
"grad_norm": 0.28492769598960876,
"learning_rate": 4.5848192569661706e-05,
"loss": 0.1286,
"step": 381
},
{
"epoch": 1.0700280112044818,
"grad_norm": 0.30735185742378235,
"learning_rate": 4.5823031017752485e-05,
"loss": 0.2082,
"step": 382
},
{
"epoch": 1.072829131652661,
"grad_norm": 0.390432208776474,
"learning_rate": 4.579780039967339e-05,
"loss": 0.2061,
"step": 383
},
{
"epoch": 1.0756302521008403,
"grad_norm": 0.5083768367767334,
"learning_rate": 4.577250079910973e-05,
"loss": 0.2584,
"step": 384
},
{
"epoch": 1.0784313725490196,
"grad_norm": 0.2757357060909271,
"learning_rate": 4.574713229997563e-05,
"loss": 0.1371,
"step": 385
},
{
"epoch": 1.0812324929971988,
"grad_norm": 0.5543646216392517,
"learning_rate": 4.5721694986413753e-05,
"loss": 0.2586,
"step": 386
},
{
"epoch": 1.084033613445378,
"grad_norm": 0.29792216420173645,
"learning_rate": 4.5696188942795e-05,
"loss": 0.21,
"step": 387
},
{
"epoch": 1.0868347338935573,
"grad_norm": 0.2785909175872803,
"learning_rate": 4.5670614253718224e-05,
"loss": 0.213,
"step": 388
},
{
"epoch": 1.0896358543417366,
"grad_norm": 0.6011048555374146,
"learning_rate": 4.5644971004009984e-05,
"loss": 0.2512,
"step": 389
},
{
"epoch": 1.092436974789916,
"grad_norm": 0.32919472455978394,
"learning_rate": 4.5619259278724214e-05,
"loss": 0.1317,
"step": 390
},
{
"epoch": 1.0952380952380953,
"grad_norm": 0.19824554026126862,
"learning_rate": 4.5593479163141994e-05,
"loss": 0.1699,
"step": 391
},
{
"epoch": 1.0980392156862746,
"grad_norm": 0.20258326828479767,
"learning_rate": 4.556763074277124e-05,
"loss": 0.1732,
"step": 392
},
{
"epoch": 1.1008403361344539,
"grad_norm": 0.30259111523628235,
"learning_rate": 4.55417141033464e-05,
"loss": 0.138,
"step": 393
},
{
"epoch": 1.1036414565826331,
"grad_norm": 0.3482188880443573,
"learning_rate": 4.551572933082822e-05,
"loss": 0.1307,
"step": 394
},
{
"epoch": 1.1064425770308124,
"grad_norm": 0.24273444712162018,
"learning_rate": 4.548967651140341e-05,
"loss": 0.2059,
"step": 395
},
{
"epoch": 1.1092436974789917,
"grad_norm": 0.2121374011039734,
"learning_rate": 4.54635557314844e-05,
"loss": 0.1707,
"step": 396
},
{
"epoch": 1.112044817927171,
"grad_norm": 0.22848618030548096,
"learning_rate": 4.5437367077709e-05,
"loss": 0.2048,
"step": 397
},
{
"epoch": 1.1148459383753502,
"grad_norm": 0.4918600022792816,
"learning_rate": 4.541111063694019e-05,
"loss": 0.1049,
"step": 398
},
{
"epoch": 1.1176470588235294,
"grad_norm": 0.47923439741134644,
"learning_rate": 4.538478649626574e-05,
"loss": 0.0996,
"step": 399
},
{
"epoch": 1.1204481792717087,
"grad_norm": 0.42145538330078125,
"learning_rate": 4.5358394742998e-05,
"loss": 0.2455,
"step": 400
},
{
"epoch": 1.123249299719888,
"grad_norm": 0.8408336639404297,
"learning_rate": 4.533193546467357e-05,
"loss": 0.3064,
"step": 401
},
{
"epoch": 1.1260504201680672,
"grad_norm": 0.5717082619667053,
"learning_rate": 4.530540874905302e-05,
"loss": 0.2691,
"step": 402
},
{
"epoch": 1.1288515406162465,
"grad_norm": 0.4880336821079254,
"learning_rate": 4.527881468412058e-05,
"loss": 0.1031,
"step": 403
},
{
"epoch": 1.1316526610644257,
"grad_norm": 0.23222537338733673,
"learning_rate": 4.52521533580839e-05,
"loss": 0.2062,
"step": 404
},
{
"epoch": 1.134453781512605,
"grad_norm": 0.4019043445587158,
"learning_rate": 4.522542485937369e-05,
"loss": 0.2336,
"step": 405
},
{
"epoch": 1.1372549019607843,
"grad_norm": 0.4039878845214844,
"learning_rate": 4.5198629276643465e-05,
"loss": 0.2291,
"step": 406
},
{
"epoch": 1.1400560224089635,
"grad_norm": 0.24419677257537842,
"learning_rate": 4.517176669876927e-05,
"loss": 0.1874,
"step": 407
},
{
"epoch": 1.1428571428571428,
"grad_norm": 0.5417842268943787,
"learning_rate": 4.5144837214849334e-05,
"loss": 0.1077,
"step": 408
},
{
"epoch": 1.145658263305322,
"grad_norm": 0.5518860816955566,
"learning_rate": 4.5117840914203805e-05,
"loss": 0.1073,
"step": 409
},
{
"epoch": 1.1484593837535013,
"grad_norm": 0.282936692237854,
"learning_rate": 4.509077788637446e-05,
"loss": 0.1585,
"step": 410
},
{
"epoch": 1.1512605042016806,
"grad_norm": 0.22835111618041992,
"learning_rate": 4.5063648221124386e-05,
"loss": 0.1841,
"step": 411
},
{
"epoch": 1.1540616246498598,
"grad_norm": 0.36185768246650696,
"learning_rate": 4.503645200843771e-05,
"loss": 0.1392,
"step": 412
},
{
"epoch": 1.156862745098039,
"grad_norm": 0.5351378321647644,
"learning_rate": 4.500918933851928e-05,
"loss": 0.1027,
"step": 413
},
{
"epoch": 1.1596638655462184,
"grad_norm": 0.2676061987876892,
"learning_rate": 4.498186030179434e-05,
"loss": 0.158,
"step": 414
},
{
"epoch": 1.1624649859943978,
"grad_norm": 0.3645527958869934,
"learning_rate": 4.495446498890831e-05,
"loss": 0.1314,
"step": 415
},
{
"epoch": 1.165266106442577,
"grad_norm": 0.29105129837989807,
"learning_rate": 4.4927003490726404e-05,
"loss": 0.1341,
"step": 416
},
{
"epoch": 1.1680672268907564,
"grad_norm": 0.7233947515487671,
"learning_rate": 4.4899475898333367e-05,
"loss": 0.2707,
"step": 417
},
{
"epoch": 1.1708683473389356,
"grad_norm": 0.342913419008255,
"learning_rate": 4.487188230303316e-05,
"loss": 0.1763,
"step": 418
},
{
"epoch": 1.173669467787115,
"grad_norm": 0.2501223385334015,
"learning_rate": 4.48442227963487e-05,
"loss": 0.1555,
"step": 419
},
{
"epoch": 1.1764705882352942,
"grad_norm": 0.32546159625053406,
"learning_rate": 4.4816497470021454e-05,
"loss": 0.1288,
"step": 420
},
{
"epoch": 1.1792717086834734,
"grad_norm": 0.23415644466876984,
"learning_rate": 4.478870641601127e-05,
"loss": 0.1575,
"step": 421
},
{
"epoch": 1.1820728291316527,
"grad_norm": 0.2671819031238556,
"learning_rate": 4.4760849726495945e-05,
"loss": 0.13,
"step": 422
},
{
"epoch": 1.184873949579832,
"grad_norm": 1.1153579950332642,
"learning_rate": 4.473292749387102e-05,
"loss": 0.3314,
"step": 423
},
{
"epoch": 1.1876750700280112,
"grad_norm": 0.2628577947616577,
"learning_rate": 4.47049398107494e-05,
"loss": 0.1561,
"step": 424
},
{
"epoch": 1.1904761904761905,
"grad_norm": 0.2286321073770523,
"learning_rate": 4.467688676996111e-05,
"loss": 0.1583,
"step": 425
},
{
"epoch": 1.1932773109243697,
"grad_norm": 0.29673612117767334,
"learning_rate": 4.464876846455291e-05,
"loss": 0.1242,
"step": 426
},
{
"epoch": 1.196078431372549,
"grad_norm": 0.44082918763160706,
"learning_rate": 4.4620584987788065e-05,
"loss": 0.2101,
"step": 427
},
{
"epoch": 1.1988795518207283,
"grad_norm": 0.2973793148994446,
"learning_rate": 4.4592336433146e-05,
"loss": 0.1493,
"step": 428
},
{
"epoch": 1.2016806722689075,
"grad_norm": 0.2873530089855194,
"learning_rate": 4.4564022894321966e-05,
"loss": 0.1652,
"step": 429
},
{
"epoch": 1.2044817927170868,
"grad_norm": 0.5498677492141724,
"learning_rate": 4.4535644465226796e-05,
"loss": 0.0821,
"step": 430
},
{
"epoch": 1.207282913165266,
"grad_norm": 0.249070942401886,
"learning_rate": 4.450720123998651e-05,
"loss": 0.1611,
"step": 431
},
{
"epoch": 1.2100840336134453,
"grad_norm": 0.5754450559616089,
"learning_rate": 4.4478693312942054e-05,
"loss": 0.236,
"step": 432
},
{
"epoch": 1.2128851540616246,
"grad_norm": 0.31797465682029724,
"learning_rate": 4.4450120778649014e-05,
"loss": 0.1199,
"step": 433
},
{
"epoch": 1.215686274509804,
"grad_norm": 0.4542624056339264,
"learning_rate": 4.4421483731877214e-05,
"loss": 0.1787,
"step": 434
},
{
"epoch": 1.2184873949579833,
"grad_norm": 0.35622158646583557,
"learning_rate": 4.43927822676105e-05,
"loss": 0.1261,
"step": 435
},
{
"epoch": 1.2212885154061626,
"grad_norm": 0.9345407485961914,
"learning_rate": 4.4364016481046336e-05,
"loss": 0.2648,
"step": 436
},
{
"epoch": 1.2240896358543418,
"grad_norm": 0.4623335301876068,
"learning_rate": 4.433518646759558e-05,
"loss": 0.187,
"step": 437
},
{
"epoch": 1.226890756302521,
"grad_norm": 0.8016291856765747,
"learning_rate": 4.4306292322882066e-05,
"loss": 0.2376,
"step": 438
},
{
"epoch": 1.2296918767507004,
"grad_norm": 0.3608405292034149,
"learning_rate": 4.4277334142742375e-05,
"loss": 0.1337,
"step": 439
},
{
"epoch": 1.2324929971988796,
"grad_norm": 0.9408867359161377,
"learning_rate": 4.424831202322548e-05,
"loss": 0.2775,
"step": 440
},
{
"epoch": 1.2352941176470589,
"grad_norm": 0.36630916595458984,
"learning_rate": 4.421922606059242e-05,
"loss": 0.1437,
"step": 441
},
{
"epoch": 1.2380952380952381,
"grad_norm": 0.5501814484596252,
"learning_rate": 4.419007635131598e-05,
"loss": 0.129,
"step": 442
},
{
"epoch": 1.2408963585434174,
"grad_norm": 0.5585311651229858,
"learning_rate": 4.416086299208041e-05,
"loss": 0.2171,
"step": 443
},
{
"epoch": 1.2436974789915967,
"grad_norm": 0.5523871779441833,
"learning_rate": 4.413158607978104e-05,
"loss": 0.1953,
"step": 444
},
{
"epoch": 1.246498599439776,
"grad_norm": 0.778249979019165,
"learning_rate": 4.410224571152403e-05,
"loss": 0.0988,
"step": 445
},
{
"epoch": 1.2492997198879552,
"grad_norm": 0.3328462839126587,
"learning_rate": 4.407284198462597e-05,
"loss": 0.189,
"step": 446
},
{
"epoch": 1.2521008403361344,
"grad_norm": 0.3411165177822113,
"learning_rate": 4.404337499661364e-05,
"loss": 0.1717,
"step": 447
},
{
"epoch": 1.2549019607843137,
"grad_norm": 0.3889773488044739,
"learning_rate": 4.4013844845223626e-05,
"loss": 0.2114,
"step": 448
},
{
"epoch": 1.257703081232493,
"grad_norm": 0.2957543134689331,
"learning_rate": 4.398425162840202e-05,
"loss": 0.1762,
"step": 449
},
{
"epoch": 1.2605042016806722,
"grad_norm": 0.4489629864692688,
"learning_rate": 4.395459544430407e-05,
"loss": 0.2177,
"step": 450
},
{
"epoch": 1.2633053221288515,
"grad_norm": 0.5136566162109375,
"learning_rate": 4.3924876391293915e-05,
"loss": 0.1395,
"step": 451
},
{
"epoch": 1.2661064425770308,
"grad_norm": 0.6125023365020752,
"learning_rate": 4.3895094567944186e-05,
"loss": 0.1241,
"step": 452
},
{
"epoch": 1.26890756302521,
"grad_norm": 0.3510366380214691,
"learning_rate": 4.386525007303571e-05,
"loss": 0.1614,
"step": 453
},
{
"epoch": 1.2717086834733893,
"grad_norm": 0.31088364124298096,
"learning_rate": 4.3835343005557215e-05,
"loss": 0.1631,
"step": 454
},
{
"epoch": 1.2745098039215685,
"grad_norm": 0.4494378864765167,
"learning_rate": 4.380537346470495e-05,
"loss": 0.1324,
"step": 455
},
{
"epoch": 1.2773109243697478,
"grad_norm": 0.5657458901405334,
"learning_rate": 4.3775341549882364e-05,
"loss": 0.1197,
"step": 456
},
{
"epoch": 1.280112044817927,
"grad_norm": 0.26143553853034973,
"learning_rate": 4.374524736069982e-05,
"loss": 0.1596,
"step": 457
},
{
"epoch": 1.2829131652661063,
"grad_norm": 0.3509024977684021,
"learning_rate": 4.37150909969742e-05,
"loss": 0.1464,
"step": 458
},
{
"epoch": 1.2857142857142856,
"grad_norm": 0.3572234809398651,
"learning_rate": 4.368487255872864e-05,
"loss": 0.1751,
"step": 459
},
{
"epoch": 1.2885154061624648,
"grad_norm": 0.50468909740448,
"learning_rate": 4.365459214619214e-05,
"loss": 0.0963,
"step": 460
},
{
"epoch": 1.2913165266106443,
"grad_norm": 0.3895762860774994,
"learning_rate": 4.3624249859799274e-05,
"loss": 0.1136,
"step": 461
},
{
"epoch": 1.2941176470588236,
"grad_norm": 0.6248763203620911,
"learning_rate": 4.359384580018982e-05,
"loss": 0.1675,
"step": 462
},
{
"epoch": 1.2969187675070029,
"grad_norm": 0.37576088309288025,
"learning_rate": 4.356338006820849e-05,
"loss": 0.1191,
"step": 463
},
{
"epoch": 1.2997198879551821,
"grad_norm": 0.5103853940963745,
"learning_rate": 4.35328527649045e-05,
"loss": 0.1591,
"step": 464
},
{
"epoch": 1.3025210084033614,
"grad_norm": 0.6339231133460999,
"learning_rate": 4.35022639915313e-05,
"loss": 0.1657,
"step": 465
},
{
"epoch": 1.3053221288515406,
"grad_norm": 0.3810737729072571,
"learning_rate": 4.347161384954626e-05,
"loss": 0.1179,
"step": 466
},
{
"epoch": 1.30812324929972,
"grad_norm": 1.3601053953170776,
"learning_rate": 4.344090244061024e-05,
"loss": 0.2517,
"step": 467
},
{
"epoch": 1.3109243697478992,
"grad_norm": 0.40818560123443604,
"learning_rate": 4.341012986658738e-05,
"loss": 0.123,
"step": 468
},
{
"epoch": 1.3137254901960784,
"grad_norm": 1.1468818187713623,
"learning_rate": 4.337929622954463e-05,
"loss": 0.2502,
"step": 469
},
{
"epoch": 1.3165266106442577,
"grad_norm": 1.0096850395202637,
"learning_rate": 4.334840163175151e-05,
"loss": 0.2248,
"step": 470
},
{
"epoch": 1.319327731092437,
"grad_norm": 0.43895837664604187,
"learning_rate": 4.3317446175679735e-05,
"loss": 0.1442,
"step": 471
},
{
"epoch": 1.3221288515406162,
"grad_norm": 1.211270809173584,
"learning_rate": 4.3286429964002856e-05,
"loss": 0.2676,
"step": 472
},
{
"epoch": 1.3249299719887955,
"grad_norm": 0.5929017663002014,
"learning_rate": 4.325535309959596e-05,
"loss": 0.0812,
"step": 473
},
{
"epoch": 1.3277310924369747,
"grad_norm": 0.3913727104663849,
"learning_rate": 4.3224215685535294e-05,
"loss": 0.1334,
"step": 474
},
{
"epoch": 1.330532212885154,
"grad_norm": 0.4079723358154297,
"learning_rate": 4.3193017825097936e-05,
"loss": 0.1742,
"step": 475
},
{
"epoch": 1.3333333333333333,
"grad_norm": 0.3363523781299591,
"learning_rate": 4.316175962176148e-05,
"loss": 0.1315,
"step": 476
},
{
"epoch": 1.3361344537815127,
"grad_norm": 0.5548957586288452,
"learning_rate": 4.313044117920363e-05,
"loss": 0.0904,
"step": 477
},
{
"epoch": 1.338935574229692,
"grad_norm": 0.2717703580856323,
"learning_rate": 4.3099062601301904e-05,
"loss": 0.1617,
"step": 478
},
{
"epoch": 1.3417366946778713,
"grad_norm": 0.48116743564605713,
"learning_rate": 4.30676239921333e-05,
"loss": 0.2158,
"step": 479
},
{
"epoch": 1.3445378151260505,
"grad_norm": 0.5152187943458557,
"learning_rate": 4.3036125455973896e-05,
"loss": 0.0882,
"step": 480
},
{
"epoch": 1.3473389355742298,
"grad_norm": 0.747715175151825,
"learning_rate": 4.300456709729856e-05,
"loss": 0.2583,
"step": 481
},
{
"epoch": 1.350140056022409,
"grad_norm": 0.24533413350582123,
"learning_rate": 4.2972949020780575e-05,
"loss": 0.1618,
"step": 482
},
{
"epoch": 1.3529411764705883,
"grad_norm": 0.2675352394580841,
"learning_rate": 4.294127133129128e-05,
"loss": 0.1735,
"step": 483
},
{
"epoch": 1.3557422969187676,
"grad_norm": 0.40604180097579956,
"learning_rate": 4.290953413389977e-05,
"loss": 0.1971,
"step": 484
},
{
"epoch": 1.3585434173669468,
"grad_norm": 0.5014567971229553,
"learning_rate": 4.2877737533872485e-05,
"loss": 0.0903,
"step": 485
},
{
"epoch": 1.361344537815126,
"grad_norm": 0.3054341971874237,
"learning_rate": 4.284588163667292e-05,
"loss": 0.1307,
"step": 486
},
{
"epoch": 1.3641456582633054,
"grad_norm": 0.5000020265579224,
"learning_rate": 4.281396654796124e-05,
"loss": 0.0904,
"step": 487
},
{
"epoch": 1.3669467787114846,
"grad_norm": 0.3115267753601074,
"learning_rate": 4.278199237359392e-05,
"loss": 0.1209,
"step": 488
},
{
"epoch": 1.3697478991596639,
"grad_norm": 0.22438497841358185,
"learning_rate": 4.274995921962343e-05,
"loss": 0.1775,
"step": 489
},
{
"epoch": 1.3725490196078431,
"grad_norm": 0.25052976608276367,
"learning_rate": 4.271786719229786e-05,
"loss": 0.1337,
"step": 490
},
{
"epoch": 1.3753501400560224,
"grad_norm": 0.2692072093486786,
"learning_rate": 4.268571639806057e-05,
"loss": 0.1773,
"step": 491
},
{
"epoch": 1.3781512605042017,
"grad_norm": 0.5472849011421204,
"learning_rate": 4.265350694354985e-05,
"loss": 0.2203,
"step": 492
},
{
"epoch": 1.380952380952381,
"grad_norm": 0.2849833071231842,
"learning_rate": 4.2621238935598524e-05,
"loss": 0.1256,
"step": 493
},
{
"epoch": 1.3837535014005602,
"grad_norm": 0.4663335084915161,
"learning_rate": 4.2588912481233666e-05,
"loss": 0.0828,
"step": 494
},
{
"epoch": 1.3865546218487395,
"grad_norm": 0.3137625753879547,
"learning_rate": 4.2556527687676186e-05,
"loss": 0.1409,
"step": 495
},
{
"epoch": 1.3893557422969187,
"grad_norm": 0.23330673575401306,
"learning_rate": 4.2524084662340494e-05,
"loss": 0.1695,
"step": 496
},
{
"epoch": 1.392156862745098,
"grad_norm": 0.2527044117450714,
"learning_rate": 4.249158351283414e-05,
"loss": 0.125,
"step": 497
},
{
"epoch": 1.3949579831932772,
"grad_norm": 0.2422427237033844,
"learning_rate": 4.2459024346957475e-05,
"loss": 0.1719,
"step": 498
},
{
"epoch": 1.3977591036414565,
"grad_norm": 0.25663718581199646,
"learning_rate": 4.2426407272703284e-05,
"loss": 0.1757,
"step": 499
},
{
"epoch": 1.4005602240896358,
"grad_norm": 0.2737230062484741,
"learning_rate": 4.2393732398256394e-05,
"loss": 0.114,
"step": 500
},
{
"epoch": 1.403361344537815,
"grad_norm": 0.2504082918167114,
"learning_rate": 4.236099983199338e-05,
"loss": 0.1697,
"step": 501
},
{
"epoch": 1.4061624649859943,
"grad_norm": 0.911520779132843,
"learning_rate": 4.232820968248214e-05,
"loss": 0.2774,
"step": 502
},
{
"epoch": 1.4089635854341735,
"grad_norm": 0.4278314411640167,
"learning_rate": 4.229536205848158e-05,
"loss": 0.2105,
"step": 503
},
{
"epoch": 1.4117647058823528,
"grad_norm": 0.22557204961776733,
"learning_rate": 4.2262457068941245e-05,
"loss": 0.1257,
"step": 504
},
{
"epoch": 1.4145658263305323,
"grad_norm": 0.2534330189228058,
"learning_rate": 4.222949482300094e-05,
"loss": 0.1229,
"step": 505
},
{
"epoch": 1.4173669467787116,
"grad_norm": 0.20387370884418488,
"learning_rate": 4.219647542999037e-05,
"loss": 0.1262,
"step": 506
},
{
"epoch": 1.4201680672268908,
"grad_norm": 0.20558597147464752,
"learning_rate": 4.21633989994288e-05,
"loss": 0.1239,
"step": 507
},
{
"epoch": 1.42296918767507,
"grad_norm": 0.6656189560890198,
"learning_rate": 4.2130265641024705e-05,
"loss": 0.2589,
"step": 508
},
{
"epoch": 1.4257703081232493,
"grad_norm": 0.3030642569065094,
"learning_rate": 4.209707546467531e-05,
"loss": 0.1137,
"step": 509
},
{
"epoch": 1.4285714285714286,
"grad_norm": 0.2517474591732025,
"learning_rate": 4.206382858046636e-05,
"loss": 0.1739,
"step": 510
},
{
"epoch": 1.4313725490196079,
"grad_norm": 0.21296930313110352,
"learning_rate": 4.2030525098671646e-05,
"loss": 0.1274,
"step": 511
},
{
"epoch": 1.4341736694677871,
"grad_norm": 0.23091089725494385,
"learning_rate": 4.199716512975272e-05,
"loss": 0.1305,
"step": 512
},
{
"epoch": 1.4369747899159664,
"grad_norm": 0.25984320044517517,
"learning_rate": 4.1963748784358456e-05,
"loss": 0.1668,
"step": 513
},
{
"epoch": 1.4397759103641457,
"grad_norm": 0.2812284231185913,
"learning_rate": 4.1930276173324756e-05,
"loss": 0.1218,
"step": 514
},
{
"epoch": 1.442577030812325,
"grad_norm": 0.42426514625549316,
"learning_rate": 4.189674740767411e-05,
"loss": 0.0794,
"step": 515
},
{
"epoch": 1.4453781512605042,
"grad_norm": 0.23902854323387146,
"learning_rate": 4.1863162598615265e-05,
"loss": 0.1354,
"step": 516
},
{
"epoch": 1.4481792717086834,
"grad_norm": 0.2747328281402588,
"learning_rate": 4.1829521857542885e-05,
"loss": 0.1168,
"step": 517
},
{
"epoch": 1.4509803921568627,
"grad_norm": 0.25116053223609924,
"learning_rate": 4.1795825296037126e-05,
"loss": 0.1312,
"step": 518
},
{
"epoch": 1.453781512605042,
"grad_norm": 0.7276238799095154,
"learning_rate": 4.176207302586329e-05,
"loss": 0.2773,
"step": 519
},
{
"epoch": 1.4565826330532212,
"grad_norm": 0.26795586943626404,
"learning_rate": 4.172826515897146e-05,
"loss": 0.1678,
"step": 520
},
{
"epoch": 1.4593837535014005,
"grad_norm": 0.42587825655937195,
"learning_rate": 4.169440180749612e-05,
"loss": 0.0729,
"step": 521
},
{
"epoch": 1.46218487394958,
"grad_norm": 0.3999551832675934,
"learning_rate": 4.166048308375579e-05,
"loss": 0.1851,
"step": 522
},
{
"epoch": 1.4649859943977592,
"grad_norm": 0.2874641418457031,
"learning_rate": 4.162650910025264e-05,
"loss": 0.162,
"step": 523
},
{
"epoch": 1.4677871148459385,
"grad_norm": 0.7796664834022522,
"learning_rate": 4.159247996967215e-05,
"loss": 0.2352,
"step": 524
},
{
"epoch": 1.4705882352941178,
"grad_norm": 1.0714823007583618,
"learning_rate": 4.1558395804882695e-05,
"loss": 0.3158,
"step": 525
},
{
"epoch": 1.473389355742297,
"grad_norm": 1.209743618965149,
"learning_rate": 4.152425671893518e-05,
"loss": 0.3378,
"step": 526
},
{
"epoch": 1.4761904761904763,
"grad_norm": 0.25424477458000183,
"learning_rate": 4.149006282506268e-05,
"loss": 0.1739,
"step": 527
},
{
"epoch": 1.4789915966386555,
"grad_norm": 0.2382468283176422,
"learning_rate": 4.145581423668008e-05,
"loss": 0.1301,
"step": 528
},
{
"epoch": 1.4817927170868348,
"grad_norm": 0.2188778817653656,
"learning_rate": 4.142151106738364e-05,
"loss": 0.1625,
"step": 529
},
{
"epoch": 1.484593837535014,
"grad_norm": 0.4427192807197571,
"learning_rate": 4.138715343095068e-05,
"loss": 0.085,
"step": 530
},
{
"epoch": 1.4873949579831933,
"grad_norm": 0.22112633287906647,
"learning_rate": 4.135274144133918e-05,
"loss": 0.1694,
"step": 531
},
{
"epoch": 1.4901960784313726,
"grad_norm": 0.27271297574043274,
"learning_rate": 4.1318275212687376e-05,
"loss": 0.1251,
"step": 532
},
{
"epoch": 1.4929971988795518,
"grad_norm": 0.3934704661369324,
"learning_rate": 4.1283754859313414e-05,
"loss": 0.2116,
"step": 533
},
{
"epoch": 1.495798319327731,
"grad_norm": 0.33457499742507935,
"learning_rate": 4.124918049571499e-05,
"loss": 0.2102,
"step": 534
},
{
"epoch": 1.4985994397759104,
"grad_norm": 0.33014121651649475,
"learning_rate": 4.12145522365689e-05,
"loss": 0.2068,
"step": 535
},
{
"epoch": 1.5014005602240896,
"grad_norm": 1.0461491346359253,
"learning_rate": 4.117987019673073e-05,
"loss": 0.3212,
"step": 536
},
{
"epoch": 1.504201680672269,
"grad_norm": 0.45663779973983765,
"learning_rate": 4.1145134491234427e-05,
"loss": 0.2363,
"step": 537
},
{
"epoch": 1.5070028011204482,
"grad_norm": 0.46917828917503357,
"learning_rate": 4.111034523529196e-05,
"loss": 0.2326,
"step": 538
},
{
"epoch": 1.5098039215686274,
"grad_norm": 0.30291152000427246,
"learning_rate": 4.1075502544292884e-05,
"loss": 0.2064,
"step": 539
},
{
"epoch": 1.5126050420168067,
"grad_norm": 0.3694068491458893,
"learning_rate": 4.1040606533804024e-05,
"loss": 0.1405,
"step": 540
},
{
"epoch": 1.515406162464986,
"grad_norm": 0.3829762935638428,
"learning_rate": 4.100565731956903e-05,
"loss": 0.1385,
"step": 541
},
{
"epoch": 1.5182072829131652,
"grad_norm": 0.2339664101600647,
"learning_rate": 4.097065501750804e-05,
"loss": 0.1658,
"step": 542
},
{
"epoch": 1.5210084033613445,
"grad_norm": 0.2567518353462219,
"learning_rate": 4.093559974371725e-05,
"loss": 0.1748,
"step": 543
},
{
"epoch": 1.5238095238095237,
"grad_norm": 0.5793279409408569,
"learning_rate": 4.0900491614468553e-05,
"loss": 0.1087,
"step": 544
},
{
"epoch": 1.526610644257703,
"grad_norm": 0.5785588622093201,
"learning_rate": 4.086533074620919e-05,
"loss": 0.1099,
"step": 545
},
{
"epoch": 1.5294117647058822,
"grad_norm": 0.5617150664329529,
"learning_rate": 4.083011725556129e-05,
"loss": 0.1074,
"step": 546
},
{
"epoch": 1.5322128851540615,
"grad_norm": 0.36825308203697205,
"learning_rate": 4.0794851259321546e-05,
"loss": 0.2374,
"step": 547
},
{
"epoch": 1.5350140056022408,
"grad_norm": 0.5937955379486084,
"learning_rate": 4.0759532874460785e-05,
"loss": 0.2677,
"step": 548
},
{
"epoch": 1.53781512605042,
"grad_norm": 0.24256612360477448,
"learning_rate": 4.0724162218123596e-05,
"loss": 0.1698,
"step": 549
},
{
"epoch": 1.5406162464985993,
"grad_norm": 0.5699710249900818,
"learning_rate": 4.068873940762796e-05,
"loss": 0.1086,
"step": 550
},
{
"epoch": 1.5434173669467786,
"grad_norm": 0.3110926151275635,
"learning_rate": 4.065326456046483e-05,
"loss": 0.2061,
"step": 551
},
{
"epoch": 1.5462184873949578,
"grad_norm": 0.36440059542655945,
"learning_rate": 4.0617737794297764e-05,
"loss": 0.2146,
"step": 552
},
{
"epoch": 1.5490196078431373,
"grad_norm": 0.2662959098815918,
"learning_rate": 4.058215922696252e-05,
"loss": 0.1728,
"step": 553
},
{
"epoch": 1.5518207282913166,
"grad_norm": 0.3178268074989319,
"learning_rate": 4.0546528976466655e-05,
"loss": 0.1402,
"step": 554
},
{
"epoch": 1.5546218487394958,
"grad_norm": 0.30780094861984253,
"learning_rate": 4.051084716098921e-05,
"loss": 0.2053,
"step": 555
},
{
"epoch": 1.557422969187675,
"grad_norm": 0.21479925513267517,
"learning_rate": 4.047511389888017e-05,
"loss": 0.1748,
"step": 556
},
{
"epoch": 1.5602240896358543,
"grad_norm": 0.24510297179222107,
"learning_rate": 4.043932930866021e-05,
"loss": 0.1683,
"step": 557
},
{
"epoch": 1.5630252100840336,
"grad_norm": 0.2353736162185669,
"learning_rate": 4.040349350902028e-05,
"loss": 0.1643,
"step": 558
},
{
"epoch": 1.5658263305322129,
"grad_norm": 0.5024756789207458,
"learning_rate": 4.036760661882109e-05,
"loss": 0.0958,
"step": 559
},
{
"epoch": 1.5686274509803921,
"grad_norm": 0.20932908356189728,
"learning_rate": 4.033166875709291e-05,
"loss": 0.1738,
"step": 560
},
{
"epoch": 1.5714285714285714,
"grad_norm": 0.20001398026943207,
"learning_rate": 4.029568004303501e-05,
"loss": 0.173,
"step": 561
},
{
"epoch": 1.5742296918767507,
"grad_norm": 0.2733915150165558,
"learning_rate": 4.025964059601535e-05,
"loss": 0.1329,
"step": 562
},
{
"epoch": 1.57703081232493,
"grad_norm": 0.48111703991889954,
"learning_rate": 4.022355053557015e-05,
"loss": 0.2336,
"step": 563
},
{
"epoch": 1.5798319327731094,
"grad_norm": 0.6872272491455078,
"learning_rate": 4.018740998140352e-05,
"loss": 0.2748,
"step": 564
},
{
"epoch": 1.5826330532212887,
"grad_norm": 0.7389792203903198,
"learning_rate": 4.015121905338704e-05,
"loss": 0.2948,
"step": 565
},
{
"epoch": 1.585434173669468,
"grad_norm": 0.46626532077789307,
"learning_rate": 4.011497787155938e-05,
"loss": 0.2341,
"step": 566
},
{
"epoch": 1.5882352941176472,
"grad_norm": 0.2073425054550171,
"learning_rate": 4.007868655612586e-05,
"loss": 0.1662,
"step": 567
},
{
"epoch": 1.5910364145658265,
"grad_norm": 0.2893884479999542,
"learning_rate": 4.004234522745813e-05,
"loss": 0.1419,
"step": 568
},
{
"epoch": 1.5938375350140057,
"grad_norm": 0.3202155828475952,
"learning_rate": 4.00059540060937e-05,
"loss": 0.1377,
"step": 569
},
{
"epoch": 1.596638655462185,
"grad_norm": 0.21610812842845917,
"learning_rate": 3.996951301273557e-05,
"loss": 0.1696,
"step": 570
},
{
"epoch": 1.5994397759103642,
"grad_norm": 0.1836165338754654,
"learning_rate": 3.993302236825181e-05,
"loss": 0.1733,
"step": 571
},
{
"epoch": 1.6022408963585435,
"grad_norm": 0.2764729857444763,
"learning_rate": 3.98964821936752e-05,
"loss": 0.1683,
"step": 572
},
{
"epoch": 1.6050420168067228,
"grad_norm": 0.31198838353157043,
"learning_rate": 3.9859892610202785e-05,
"loss": 0.1573,
"step": 573
},
{
"epoch": 1.607843137254902,
"grad_norm": 0.5770326852798462,
"learning_rate": 3.982325373919549e-05,
"loss": 0.2388,
"step": 574
},
{
"epoch": 1.6106442577030813,
"grad_norm": 0.3288671672344208,
"learning_rate": 3.9786565702177723e-05,
"loss": 0.2145,
"step": 575
},
{
"epoch": 1.6134453781512605,
"grad_norm": 0.2905415892601013,
"learning_rate": 3.974982862083697e-05,
"loss": 0.1867,
"step": 576
},
{
"epoch": 1.6162464985994398,
"grad_norm": 0.2127629816532135,
"learning_rate": 3.9713042617023386e-05,
"loss": 0.1782,
"step": 577
},
{
"epoch": 1.619047619047619,
"grad_norm": 0.2230168879032135,
"learning_rate": 3.967620781274938e-05,
"loss": 0.1759,
"step": 578
},
{
"epoch": 1.6218487394957983,
"grad_norm": 0.3231712281703949,
"learning_rate": 3.9639324330189236e-05,
"loss": 0.207,
"step": 579
},
{
"epoch": 1.6246498599439776,
"grad_norm": 0.3769891858100891,
"learning_rate": 3.960239229167869e-05,
"loss": 0.1371,
"step": 580
},
{
"epoch": 1.6274509803921569,
"grad_norm": 0.32875877618789673,
"learning_rate": 3.956541181971455e-05,
"loss": 0.1396,
"step": 581
},
{
"epoch": 1.6302521008403361,
"grad_norm": 0.2713623046875,
"learning_rate": 3.9528383036954224e-05,
"loss": 0.1653,
"step": 582
},
{
"epoch": 1.6330532212885154,
"grad_norm": 1.0410537719726562,
"learning_rate": 3.949130606621541e-05,
"loss": 0.3346,
"step": 583
},
{
"epoch": 1.6358543417366946,
"grad_norm": 0.23961792886257172,
"learning_rate": 3.945418103047558e-05,
"loss": 0.1784,
"step": 584
},
{
"epoch": 1.638655462184874,
"grad_norm": 0.286924809217453,
"learning_rate": 3.941700805287168e-05,
"loss": 0.2005,
"step": 585
},
{
"epoch": 1.6414565826330532,
"grad_norm": 0.38071775436401367,
"learning_rate": 3.937978725669965e-05,
"loss": 0.1445,
"step": 586
},
{
"epoch": 1.6442577030812324,
"grad_norm": 0.5587998032569885,
"learning_rate": 3.934251876541404e-05,
"loss": 0.1045,
"step": 587
},
{
"epoch": 1.6470588235294117,
"grad_norm": 0.3966423273086548,
"learning_rate": 3.9305202702627575e-05,
"loss": 0.1404,
"step": 588
},
{
"epoch": 1.649859943977591,
"grad_norm": 0.3944240212440491,
"learning_rate": 3.92678391921108e-05,
"loss": 0.139,
"step": 589
},
{
"epoch": 1.6526610644257702,
"grad_norm": 0.25802481174468994,
"learning_rate": 3.9230428357791595e-05,
"loss": 0.1688,
"step": 590
},
{
"epoch": 1.6554621848739495,
"grad_norm": 0.22108642756938934,
"learning_rate": 3.919297032375485e-05,
"loss": 0.1737,
"step": 591
},
{
"epoch": 1.6582633053221287,
"grad_norm": 0.3680080473423004,
"learning_rate": 3.915546521424198e-05,
"loss": 0.1375,
"step": 592
},
{
"epoch": 1.661064425770308,
"grad_norm": 0.2191331535577774,
"learning_rate": 3.9117913153650546e-05,
"loss": 0.1674,
"step": 593
},
{
"epoch": 1.6638655462184873,
"grad_norm": 0.4968366324901581,
"learning_rate": 3.908031426653383e-05,
"loss": 0.0956,
"step": 594
},
{
"epoch": 1.6666666666666665,
"grad_norm": 0.2787998616695404,
"learning_rate": 3.9042668677600436e-05,
"loss": 0.1356,
"step": 595
},
{
"epoch": 1.6694677871148458,
"grad_norm": 0.37599462270736694,
"learning_rate": 3.900497651171388e-05,
"loss": 0.2054,
"step": 596
},
{
"epoch": 1.6722689075630253,
"grad_norm": 0.28965288400650024,
"learning_rate": 3.8967237893892134e-05,
"loss": 0.1325,
"step": 597
},
{
"epoch": 1.6750700280112045,
"grad_norm": 0.44063901901245117,
"learning_rate": 3.892945294930728e-05,
"loss": 0.0864,
"step": 598
},
{
"epoch": 1.6778711484593838,
"grad_norm": 0.29227393865585327,
"learning_rate": 3.889162180328505e-05,
"loss": 0.1258,
"step": 599
},
{
"epoch": 1.680672268907563,
"grad_norm": 0.3660801351070404,
"learning_rate": 3.885374458130438e-05,
"loss": 0.2033,
"step": 600
},
{
"epoch": 1.6834733893557423,
"grad_norm": 0.4446983337402344,
"learning_rate": 3.881582140899707e-05,
"loss": 0.0834,
"step": 601
},
{
"epoch": 1.6862745098039216,
"grad_norm": 0.7357985377311707,
"learning_rate": 3.877785241214733e-05,
"loss": 0.244,
"step": 602
},
{
"epoch": 1.6890756302521008,
"grad_norm": 0.2903584837913513,
"learning_rate": 3.873983771669133e-05,
"loss": 0.1743,
"step": 603
},
{
"epoch": 1.69187675070028,
"grad_norm": 0.2409980297088623,
"learning_rate": 3.8701777448716856e-05,
"loss": 0.1273,
"step": 604
},
{
"epoch": 1.6946778711484594,
"grad_norm": 0.22482717037200928,
"learning_rate": 3.866367173446281e-05,
"loss": 0.1268,
"step": 605
},
{
"epoch": 1.6974789915966386,
"grad_norm": 0.2362489402294159,
"learning_rate": 3.862552070031886e-05,
"loss": 0.1251,
"step": 606
},
{
"epoch": 1.7002801120448179,
"grad_norm": 0.23961283266544342,
"learning_rate": 3.858732447282497e-05,
"loss": 0.1698,
"step": 607
},
{
"epoch": 1.7030812324929971,
"grad_norm": 0.5947995185852051,
"learning_rate": 3.854908317867102e-05,
"loss": 0.2085,
"step": 608
},
{
"epoch": 1.7058823529411766,
"grad_norm": 0.8783779740333557,
"learning_rate": 3.851079694469636e-05,
"loss": 0.2709,
"step": 609
},
{
"epoch": 1.708683473389356,
"grad_norm": 0.23939639329910278,
"learning_rate": 3.8472465897889394e-05,
"loss": 0.1278,
"step": 610
},
{
"epoch": 1.7114845938375352,
"grad_norm": 1.0147463083267212,
"learning_rate": 3.843409016538716e-05,
"loss": 0.2919,
"step": 611
},
{
"epoch": 1.7142857142857144,
"grad_norm": 0.7156258225440979,
"learning_rate": 3.8395669874474915e-05,
"loss": 0.2452,
"step": 612
},
{
"epoch": 1.7170868347338937,
"grad_norm": 0.23990198969841003,
"learning_rate": 3.835720515258572e-05,
"loss": 0.1744,
"step": 613
},
{
"epoch": 1.719887955182073,
"grad_norm": 0.6649200320243835,
"learning_rate": 3.831869612729999e-05,
"loss": 0.2606,
"step": 614
},
{
"epoch": 1.7226890756302522,
"grad_norm": 0.2535988390445709,
"learning_rate": 3.828014292634509e-05,
"loss": 0.1681,
"step": 615
},
{
"epoch": 1.7254901960784315,
"grad_norm": 0.620143711566925,
"learning_rate": 3.8241545677594895e-05,
"loss": 0.2662,
"step": 616
},
{
"epoch": 1.7282913165266107,
"grad_norm": 0.2728918492794037,
"learning_rate": 3.820290450906941e-05,
"loss": 0.1692,
"step": 617
},
{
"epoch": 1.73109243697479,
"grad_norm": 0.2622087001800537,
"learning_rate": 3.816421954893428e-05,
"loss": 0.1215,
"step": 618
},
{
"epoch": 1.7338935574229692,
"grad_norm": 0.3146950304508209,
"learning_rate": 3.8125490925500425e-05,
"loss": 0.1987,
"step": 619
},
{
"epoch": 1.7366946778711485,
"grad_norm": 0.5234703421592712,
"learning_rate": 3.808671876722357e-05,
"loss": 0.2537,
"step": 620
},
{
"epoch": 1.7394957983193278,
"grad_norm": 0.356790155172348,
"learning_rate": 3.804790320270384e-05,
"loss": 0.2111,
"step": 621
},
{
"epoch": 1.742296918767507,
"grad_norm": 0.4688396155834198,
"learning_rate": 3.800904436068533e-05,
"loss": 0.0948,
"step": 622
},
{
"epoch": 1.7450980392156863,
"grad_norm": 0.2965690493583679,
"learning_rate": 3.797014237005571e-05,
"loss": 0.1953,
"step": 623
},
{
"epoch": 1.7478991596638656,
"grad_norm": 0.45940619707107544,
"learning_rate": 3.793119735984572e-05,
"loss": 0.2438,
"step": 624
},
{
"epoch": 1.7507002801120448,
"grad_norm": 0.34540608525276184,
"learning_rate": 3.78922094592288e-05,
"loss": 0.1378,
"step": 625
},
{
"epoch": 1.753501400560224,
"grad_norm": 0.2714684307575226,
"learning_rate": 3.785317879752066e-05,
"loss": 0.2209,
"step": 626
},
{
"epoch": 1.7563025210084033,
"grad_norm": 0.6053199172019958,
"learning_rate": 3.781410550417885e-05,
"loss": 0.2887,
"step": 627
},
{
"epoch": 1.7591036414565826,
"grad_norm": 0.3671252429485321,
"learning_rate": 3.77749897088023e-05,
"loss": 0.1822,
"step": 628
},
{
"epoch": 1.7619047619047619,
"grad_norm": 0.2441740483045578,
"learning_rate": 3.773583154113092e-05,
"loss": 0.1712,
"step": 629
},
{
"epoch": 1.7647058823529411,
"grad_norm": 0.5259542465209961,
"learning_rate": 3.769663113104516e-05,
"loss": 0.1065,
"step": 630
},
{
"epoch": 1.7675070028011204,
"grad_norm": 0.2503260374069214,
"learning_rate": 3.765738860856557e-05,
"loss": 0.1761,
"step": 631
},
{
"epoch": 1.7703081232492996,
"grad_norm": 0.5481187701225281,
"learning_rate": 3.7618104103852415e-05,
"loss": 0.2614,
"step": 632
},
{
"epoch": 1.773109243697479,
"grad_norm": 0.27601489424705505,
"learning_rate": 3.757877774720517e-05,
"loss": 0.1901,
"step": 633
},
{
"epoch": 1.7759103641456582,
"grad_norm": 0.2434099018573761,
"learning_rate": 3.7539409669062136e-05,
"loss": 0.205,
"step": 634
},
{
"epoch": 1.7787114845938374,
"grad_norm": 0.28954920172691345,
"learning_rate": 3.7500000000000003e-05,
"loss": 0.1636,
"step": 635
},
{
"epoch": 1.7815126050420167,
"grad_norm": 0.5615999698638916,
"learning_rate": 3.74605488707334e-05,
"loss": 0.1061,
"step": 636
},
{
"epoch": 1.784313725490196,
"grad_norm": 0.5544323325157166,
"learning_rate": 3.742105641211449e-05,
"loss": 0.106,
"step": 637
},
{
"epoch": 1.7871148459383752,
"grad_norm": 0.5592769384384155,
"learning_rate": 3.738152275513249e-05,
"loss": 0.1079,
"step": 638
},
{
"epoch": 1.7899159663865545,
"grad_norm": 0.2856994569301605,
"learning_rate": 3.7341948030913294e-05,
"loss": 0.1731,
"step": 639
},
{
"epoch": 1.7927170868347337,
"grad_norm": 0.3745407164096832,
"learning_rate": 3.730233237071898e-05,
"loss": 0.1404,
"step": 640
},
{
"epoch": 1.795518207282913,
"grad_norm": 0.33862021565437317,
"learning_rate": 3.726267590594744e-05,
"loss": 0.1952,
"step": 641
},
{
"epoch": 1.7983193277310925,
"grad_norm": 0.3554513156414032,
"learning_rate": 3.7222978768131854e-05,
"loss": 0.1322,
"step": 642
},
{
"epoch": 1.8011204481792717,
"grad_norm": 0.22815661132335663,
"learning_rate": 3.718324108894036e-05,
"loss": 0.1714,
"step": 643
},
{
"epoch": 1.803921568627451,
"grad_norm": 0.32825687527656555,
"learning_rate": 3.7143463000175546e-05,
"loss": 0.1421,
"step": 644
},
{
"epoch": 1.8067226890756303,
"grad_norm": 0.24787546694278717,
"learning_rate": 3.7103644633774014e-05,
"loss": 0.1682,
"step": 645
},
{
"epoch": 1.8095238095238095,
"grad_norm": 0.3160790801048279,
"learning_rate": 3.706378612180598e-05,
"loss": 0.193,
"step": 646
},
{
"epoch": 1.8123249299719888,
"grad_norm": 0.22702836990356445,
"learning_rate": 3.70238875964748e-05,
"loss": 0.171,
"step": 647
},
{
"epoch": 1.815126050420168,
"grad_norm": 0.2879033386707306,
"learning_rate": 3.6983949190116576e-05,
"loss": 0.1391,
"step": 648
},
{
"epoch": 1.8179271708683473,
"grad_norm": 0.2816312313079834,
"learning_rate": 3.6943971035199645e-05,
"loss": 0.1351,
"step": 649
},
{
"epoch": 1.8207282913165266,
"grad_norm": 0.2494056075811386,
"learning_rate": 3.690395326432421e-05,
"loss": 0.165,
"step": 650
},
{
"epoch": 1.8235294117647058,
"grad_norm": 0.5620803236961365,
"learning_rate": 3.686389601022188e-05,
"loss": 0.256,
"step": 651
},
{
"epoch": 1.826330532212885,
"grad_norm": 0.36324793100357056,
"learning_rate": 3.682379940575519e-05,
"loss": 0.1187,
"step": 652
},
{
"epoch": 1.8291316526610646,
"grad_norm": 0.32306012511253357,
"learning_rate": 3.678366358391723e-05,
"loss": 0.1183,
"step": 653
},
{
"epoch": 1.8319327731092439,
"grad_norm": 0.25600212812423706,
"learning_rate": 3.674348867783115e-05,
"loss": 0.1684,
"step": 654
},
{
"epoch": 1.8347338935574231,
"grad_norm": 0.27342215180397034,
"learning_rate": 3.670327482074973e-05,
"loss": 0.1249,
"step": 655
},
{
"epoch": 1.8375350140056024,
"grad_norm": 0.4090156555175781,
"learning_rate": 3.666302214605495e-05,
"loss": 0.179,
"step": 656
},
{
"epoch": 1.8403361344537816,
"grad_norm": 0.4678898751735687,
"learning_rate": 3.662273078725754e-05,
"loss": 0.2021,
"step": 657
},
{
"epoch": 1.843137254901961,
"grad_norm": 0.28741687536239624,
"learning_rate": 3.6582400877996546e-05,
"loss": 0.1641,
"step": 658
},
{
"epoch": 1.8459383753501402,
"grad_norm": 0.4612816572189331,
"learning_rate": 3.654203255203886e-05,
"loss": 0.2167,
"step": 659
},
{
"epoch": 1.8487394957983194,
"grad_norm": 0.27487486600875854,
"learning_rate": 3.6501625943278805e-05,
"loss": 0.1567,
"step": 660
},
{
"epoch": 1.8515406162464987,
"grad_norm": 0.4421831965446472,
"learning_rate": 3.6461181185737694e-05,
"loss": 0.2094,
"step": 661
},
{
"epoch": 1.854341736694678,
"grad_norm": 0.5941998362541199,
"learning_rate": 3.6420698413563345e-05,
"loss": 0.2084,
"step": 662
},
{
"epoch": 1.8571428571428572,
"grad_norm": 0.5112701058387756,
"learning_rate": 3.6380177761029685e-05,
"loss": 0.0836,
"step": 663
},
{
"epoch": 1.8599439775910365,
"grad_norm": 0.8637323975563049,
"learning_rate": 3.633961936253628e-05,
"loss": 0.2437,
"step": 664
},
{
"epoch": 1.8627450980392157,
"grad_norm": 0.4770047962665558,
"learning_rate": 3.629902335260789e-05,
"loss": 0.1965,
"step": 665
},
{
"epoch": 1.865546218487395,
"grad_norm": 0.3599564731121063,
"learning_rate": 3.625838986589403e-05,
"loss": 0.1134,
"step": 666
},
{
"epoch": 1.8683473389355743,
"grad_norm": 0.5263693928718567,
"learning_rate": 3.621771903716849e-05,
"loss": 0.0875,
"step": 667
},
{
"epoch": 1.8711484593837535,
"grad_norm": 0.5898877382278442,
"learning_rate": 3.617701100132897e-05,
"loss": 0.2261,
"step": 668
},
{
"epoch": 1.8739495798319328,
"grad_norm": 0.369739830493927,
"learning_rate": 3.613626589339653e-05,
"loss": 0.1718,
"step": 669
},
{
"epoch": 1.876750700280112,
"grad_norm": 0.368196576833725,
"learning_rate": 3.609548384851522e-05,
"loss": 0.194,
"step": 670
},
{
"epoch": 1.8795518207282913,
"grad_norm": 0.2884950339794159,
"learning_rate": 3.605466500195159e-05,
"loss": 0.1578,
"step": 671
},
{
"epoch": 1.8823529411764706,
"grad_norm": 0.7132876515388489,
"learning_rate": 3.601380948909425e-05,
"loss": 0.2391,
"step": 672
},
{
"epoch": 1.8851540616246498,
"grad_norm": 0.29729971289634705,
"learning_rate": 3.597291744545344e-05,
"loss": 0.1518,
"step": 673
},
{
"epoch": 1.887955182072829,
"grad_norm": 0.24395136535167694,
"learning_rate": 3.593198900666056e-05,
"loss": 0.1675,
"step": 674
},
{
"epoch": 1.8907563025210083,
"grad_norm": 0.28539448976516724,
"learning_rate": 3.589102430846773e-05,
"loss": 0.1573,
"step": 675
},
{
"epoch": 1.8935574229691876,
"grad_norm": 0.38136017322540283,
"learning_rate": 3.585002348674733e-05,
"loss": 0.2004,
"step": 676
},
{
"epoch": 1.8963585434173669,
"grad_norm": 0.5776926279067993,
"learning_rate": 3.5808986677491555e-05,
"loss": 0.0913,
"step": 677
},
{
"epoch": 1.8991596638655461,
"grad_norm": 0.4445839524269104,
"learning_rate": 3.576791401681194e-05,
"loss": 0.185,
"step": 678
},
{
"epoch": 1.9019607843137254,
"grad_norm": 0.31285786628723145,
"learning_rate": 3.5726805640939e-05,
"loss": 0.1609,
"step": 679
},
{
"epoch": 1.9047619047619047,
"grad_norm": 0.38080793619155884,
"learning_rate": 3.5685661686221644e-05,
"loss": 0.1928,
"step": 680
},
{
"epoch": 1.907563025210084,
"grad_norm": 0.37026283144950867,
"learning_rate": 3.564448228912682e-05,
"loss": 0.1829,
"step": 681
},
{
"epoch": 1.9103641456582632,
"grad_norm": 0.3265126347541809,
"learning_rate": 3.5603267586239026e-05,
"loss": 0.1563,
"step": 682
},
{
"epoch": 1.9131652661064424,
"grad_norm": 0.6355715990066528,
"learning_rate": 3.556201771425985e-05,
"loss": 0.2228,
"step": 683
},
{
"epoch": 1.9159663865546217,
"grad_norm": 0.5335772037506104,
"learning_rate": 3.5520732810007566e-05,
"loss": 0.1654,
"step": 684
},
{
"epoch": 1.918767507002801,
"grad_norm": 0.6501393914222717,
"learning_rate": 3.547941301041661e-05,
"loss": 0.097,
"step": 685
},
{
"epoch": 1.9215686274509802,
"grad_norm": 0.6850645542144775,
"learning_rate": 3.543805845253716e-05,
"loss": 0.1012,
"step": 686
},
{
"epoch": 1.9243697478991597,
"grad_norm": 0.659045398235321,
"learning_rate": 3.539666927353469e-05,
"loss": 0.0977,
"step": 687
},
{
"epoch": 1.927170868347339,
"grad_norm": 0.47713983058929443,
"learning_rate": 3.535524561068952e-05,
"loss": 0.1192,
"step": 688
},
{
"epoch": 1.9299719887955182,
"grad_norm": 0.4114561975002289,
"learning_rate": 3.5313787601396325e-05,
"loss": 0.1243,
"step": 689
},
{
"epoch": 1.9327731092436975,
"grad_norm": 0.4006536900997162,
"learning_rate": 3.527229538316371e-05,
"loss": 0.1811,
"step": 690
},
{
"epoch": 1.9355742296918768,
"grad_norm": 0.520041823387146,
"learning_rate": 3.523076909361373e-05,
"loss": 0.2083,
"step": 691
},
{
"epoch": 1.938375350140056,
"grad_norm": 0.3322596251964569,
"learning_rate": 3.518920887048149e-05,
"loss": 0.1568,
"step": 692
},
{
"epoch": 1.9411764705882353,
"grad_norm": 0.3033127188682556,
"learning_rate": 3.514761485161458e-05,
"loss": 0.1544,
"step": 693
},
{
"epoch": 1.9439775910364145,
"grad_norm": 1.5677649974822998,
"learning_rate": 3.510598717497276e-05,
"loss": 0.3283,
"step": 694
},
{
"epoch": 1.9467787114845938,
"grad_norm": 0.9142319560050964,
"learning_rate": 3.506432597862737e-05,
"loss": 0.2276,
"step": 695
},
{
"epoch": 1.949579831932773,
"grad_norm": 0.3185567259788513,
"learning_rate": 3.5022631400760944e-05,
"loss": 0.1542,
"step": 696
},
{
"epoch": 1.9523809523809523,
"grad_norm": 0.6337863802909851,
"learning_rate": 3.4980903579666744e-05,
"loss": 0.2202,
"step": 697
},
{
"epoch": 1.9551820728291318,
"grad_norm": 0.3815496861934662,
"learning_rate": 3.493914265374829e-05,
"loss": 0.1303,
"step": 698
},
{
"epoch": 1.957983193277311,
"grad_norm": 0.4644532799720764,
"learning_rate": 3.489734876151891e-05,
"loss": 0.1881,
"step": 699
},
{
"epoch": 1.9607843137254903,
"grad_norm": 0.43058714270591736,
"learning_rate": 3.485552204160126e-05,
"loss": 0.1121,
"step": 700
},
{
"epoch": 1.9635854341736696,
"grad_norm": 1.0429691076278687,
"learning_rate": 3.48136626327269e-05,
"loss": 0.2764,
"step": 701
},
{
"epoch": 1.9663865546218489,
"grad_norm": 0.6157497763633728,
"learning_rate": 3.4771770673735796e-05,
"loss": 0.0843,
"step": 702
},
{
"epoch": 1.9691876750700281,
"grad_norm": 0.33218902349472046,
"learning_rate": 3.472984630357587e-05,
"loss": 0.1626,
"step": 703
},
{
"epoch": 1.9719887955182074,
"grad_norm": 0.6079518795013428,
"learning_rate": 3.4687889661302576e-05,
"loss": 0.0859,
"step": 704
},
{
"epoch": 1.9747899159663866,
"grad_norm": 0.2988344728946686,
"learning_rate": 3.464590088607839e-05,
"loss": 0.1604,
"step": 705
},
{
"epoch": 1.977591036414566,
"grad_norm": 0.47207456827163696,
"learning_rate": 3.460388011717236e-05,
"loss": 0.1837,
"step": 706
},
{
"epoch": 1.9803921568627452,
"grad_norm": 0.35358136892318726,
"learning_rate": 3.456182749395966e-05,
"loss": 0.1482,
"step": 707
},
{
"epoch": 1.9831932773109244,
"grad_norm": 0.405224084854126,
"learning_rate": 3.451974315592113e-05,
"loss": 0.1288,
"step": 708
},
{
"epoch": 1.9859943977591037,
"grad_norm": 0.4823918640613556,
"learning_rate": 3.4477627242642776e-05,
"loss": 0.1835,
"step": 709
},
{
"epoch": 1.988795518207283,
"grad_norm": 0.3758796155452728,
"learning_rate": 3.443547989381536e-05,
"loss": 0.1303,
"step": 710
},
{
"epoch": 1.9915966386554622,
"grad_norm": 0.47359490394592285,
"learning_rate": 3.43933012492339e-05,
"loss": 0.1844,
"step": 711
},
{
"epoch": 1.9943977591036415,
"grad_norm": 0.3807266652584076,
"learning_rate": 3.43510914487972e-05,
"loss": 0.1357,
"step": 712
},
{
"epoch": 1.9971988795518207,
"grad_norm": 0.6917422413825989,
"learning_rate": 3.430885063250743e-05,
"loss": 0.2231,
"step": 713
},
{
"epoch": 2.0,
"grad_norm": 0.6366453766822815,
"learning_rate": 3.42665789404696e-05,
"loss": 0.0794,
"step": 714
},
{
"epoch": 2.0,
"eval_f1 (minor class)": 0.11274509803921569,
"eval_loss": 0.17888100445270538,
"eval_roc_auc": 0.5086586164934943,
"eval_runtime": 2.9373,
"eval_samples_per_second": 431.683,
"eval_steps_per_second": 13.618,
"step": 714
},
{
"epoch": 2.0028011204481793,
"grad_norm": 0.6449307203292847,
"learning_rate": 3.422427651289118e-05,
"loss": 0.0814,
"step": 715
},
{
"epoch": 2.0056022408963585,
"grad_norm": 1.047551155090332,
"learning_rate": 3.418194349008153e-05,
"loss": 0.2542,
"step": 716
},
{
"epoch": 2.008403361344538,
"grad_norm": 0.486785888671875,
"learning_rate": 3.413958001245152e-05,
"loss": 0.1403,
"step": 717
},
{
"epoch": 2.011204481792717,
"grad_norm": 1.1322331428527832,
"learning_rate": 3.409718622051303e-05,
"loss": 0.2194,
"step": 718
},
{
"epoch": 2.0140056022408963,
"grad_norm": 0.41819918155670166,
"learning_rate": 3.4054762254878476e-05,
"loss": 0.151,
"step": 719
},
{
"epoch": 2.0168067226890756,
"grad_norm": 0.38247114419937134,
"learning_rate": 3.401230825626037e-05,
"loss": 0.1593,
"step": 720
},
{
"epoch": 2.019607843137255,
"grad_norm": 0.7601361274719238,
"learning_rate": 3.396982436547082e-05,
"loss": 0.2249,
"step": 721
},
{
"epoch": 2.022408963585434,
"grad_norm": 1.248322606086731,
"learning_rate": 3.392731072342109e-05,
"loss": 0.197,
"step": 722
},
{
"epoch": 2.0252100840336134,
"grad_norm": 0.5805521607398987,
"learning_rate": 3.388476747112113e-05,
"loss": 0.1443,
"step": 723
},
{
"epoch": 2.0280112044817926,
"grad_norm": 0.47655874490737915,
"learning_rate": 3.384219474967908e-05,
"loss": 0.1962,
"step": 724
},
{
"epoch": 2.030812324929972,
"grad_norm": 0.48123157024383545,
"learning_rate": 3.3799592700300866e-05,
"loss": 0.1266,
"step": 725
},
{
"epoch": 2.033613445378151,
"grad_norm": 0.9366979002952576,
"learning_rate": 3.375696146428963e-05,
"loss": 0.2132,
"step": 726
},
{
"epoch": 2.0364145658263304,
"grad_norm": 0.38628774881362915,
"learning_rate": 3.3714301183045385e-05,
"loss": 0.1599,
"step": 727
},
{
"epoch": 2.0392156862745097,
"grad_norm": 0.5700298547744751,
"learning_rate": 3.3671611998064425e-05,
"loss": 0.1271,
"step": 728
},
{
"epoch": 2.042016806722689,
"grad_norm": 0.8660473823547363,
"learning_rate": 3.3628894050938945e-05,
"loss": 0.2301,
"step": 729
},
{
"epoch": 2.044817927170868,
"grad_norm": 0.7136172652244568,
"learning_rate": 3.3586147483356535e-05,
"loss": 0.1177,
"step": 730
},
{
"epoch": 2.0476190476190474,
"grad_norm": 0.727861225605011,
"learning_rate": 3.354337243709971e-05,
"loss": 0.2178,
"step": 731
},
{
"epoch": 2.0504201680672267,
"grad_norm": 1.5354722738265991,
"learning_rate": 3.350056905404543e-05,
"loss": 0.3138,
"step": 732
},
{
"epoch": 2.053221288515406,
"grad_norm": 0.6516889333724976,
"learning_rate": 3.345773747616467e-05,
"loss": 0.221,
"step": 733
},
{
"epoch": 2.0560224089635852,
"grad_norm": 0.4907413125038147,
"learning_rate": 3.34148778455219e-05,
"loss": 0.1524,
"step": 734
},
{
"epoch": 2.0588235294117645,
"grad_norm": 0.5165950059890747,
"learning_rate": 3.3371990304274656e-05,
"loss": 0.199,
"step": 735
},
{
"epoch": 2.0616246498599438,
"grad_norm": 1.439732313156128,
"learning_rate": 3.332907499467302e-05,
"loss": 0.2949,
"step": 736
},
{
"epoch": 2.064425770308123,
"grad_norm": 0.650676965713501,
"learning_rate": 3.328613205905921e-05,
"loss": 0.145,
"step": 737
},
{
"epoch": 2.0672268907563027,
"grad_norm": 0.444158673286438,
"learning_rate": 3.324316163986704e-05,
"loss": 0.219,
"step": 738
},
{
"epoch": 2.070028011204482,
"grad_norm": 0.7450339794158936,
"learning_rate": 3.320016387962151e-05,
"loss": 0.1589,
"step": 739
},
{
"epoch": 2.0728291316526612,
"grad_norm": 1.0164347887039185,
"learning_rate": 3.315713892093829e-05,
"loss": 0.1207,
"step": 740
},
{
"epoch": 2.0756302521008405,
"grad_norm": 0.6916231513023376,
"learning_rate": 3.3114086906523266e-05,
"loss": 0.17,
"step": 741
},
{
"epoch": 2.0784313725490198,
"grad_norm": 1.029963493347168,
"learning_rate": 3.307100797917207e-05,
"loss": 0.1218,
"step": 742
},
{
"epoch": 2.081232492997199,
"grad_norm": 0.8726139068603516,
"learning_rate": 3.302790228176959e-05,
"loss": 0.1337,
"step": 743
},
{
"epoch": 2.0840336134453783,
"grad_norm": 0.6706817746162415,
"learning_rate": 3.29847699572895e-05,
"loss": 0.1637,
"step": 744
},
{
"epoch": 2.0868347338935576,
"grad_norm": 0.7627484798431396,
"learning_rate": 3.294161114879382e-05,
"loss": 0.2022,
"step": 745
},
{
"epoch": 2.089635854341737,
"grad_norm": 0.6804760098457336,
"learning_rate": 3.289842599943237e-05,
"loss": 0.1245,
"step": 746
},
{
"epoch": 2.092436974789916,
"grad_norm": 0.5034189224243164,
"learning_rate": 3.285521465244237e-05,
"loss": 0.141,
"step": 747
},
{
"epoch": 2.0952380952380953,
"grad_norm": 0.9262709617614746,
"learning_rate": 3.281197725114792e-05,
"loss": 0.099,
"step": 748
},
{
"epoch": 2.0980392156862746,
"grad_norm": 0.45192310214042664,
"learning_rate": 3.276871393895954e-05,
"loss": 0.1634,
"step": 749
},
{
"epoch": 2.100840336134454,
"grad_norm": 1.2695709466934204,
"learning_rate": 3.272542485937369e-05,
"loss": 0.2591,
"step": 750
},
{
"epoch": 2.103641456582633,
"grad_norm": 0.6426392197608948,
"learning_rate": 3.26821101559723e-05,
"loss": 0.1701,
"step": 751
},
{
"epoch": 2.1064425770308124,
"grad_norm": 0.44048747420310974,
"learning_rate": 3.263876997242229e-05,
"loss": 0.1532,
"step": 752
},
{
"epoch": 2.1092436974789917,
"grad_norm": 1.7888472080230713,
"learning_rate": 3.259540445247509e-05,
"loss": 0.2987,
"step": 753
},
{
"epoch": 2.112044817927171,
"grad_norm": 1.113670825958252,
"learning_rate": 3.2552013739966145e-05,
"loss": 0.2087,
"step": 754
},
{
"epoch": 2.11484593837535,
"grad_norm": 0.6354279518127441,
"learning_rate": 3.250859797881452e-05,
"loss": 0.1307,
"step": 755
},
{
"epoch": 2.1176470588235294,
"grad_norm": 0.5894302129745483,
"learning_rate": 3.2465157313022284e-05,
"loss": 0.094,
"step": 756
},
{
"epoch": 2.1204481792717087,
"grad_norm": 0.5198976397514343,
"learning_rate": 3.242169188667416e-05,
"loss": 0.1224,
"step": 757
},
{
"epoch": 2.123249299719888,
"grad_norm": 0.4988885521888733,
"learning_rate": 3.2378201843937e-05,
"loss": 0.1241,
"step": 758
},
{
"epoch": 2.1260504201680672,
"grad_norm": 1.0682430267333984,
"learning_rate": 3.233468732905927e-05,
"loss": 0.2033,
"step": 759
},
{
"epoch": 2.1288515406162465,
"grad_norm": 1.5533567667007446,
"learning_rate": 3.2291148486370626e-05,
"loss": 0.2584,
"step": 760
},
{
"epoch": 2.1316526610644257,
"grad_norm": 0.7770599126815796,
"learning_rate": 3.224758546028143e-05,
"loss": 0.0703,
"step": 761
},
{
"epoch": 2.134453781512605,
"grad_norm": 0.8279226422309875,
"learning_rate": 3.220399839528222e-05,
"loss": 0.0776,
"step": 762
},
{
"epoch": 2.1372549019607843,
"grad_norm": 1.2002817392349243,
"learning_rate": 3.2160387435943296e-05,
"loss": 0.2055,
"step": 763
},
{
"epoch": 2.1400560224089635,
"grad_norm": 0.5353344678878784,
"learning_rate": 3.21167527269142e-05,
"loss": 0.1137,
"step": 764
},
{
"epoch": 2.142857142857143,
"grad_norm": 0.7878655791282654,
"learning_rate": 3.207309441292325e-05,
"loss": 0.0716,
"step": 765
},
{
"epoch": 2.145658263305322,
"grad_norm": 0.562883198261261,
"learning_rate": 3.202941263877706e-05,
"loss": 0.0879,
"step": 766
},
{
"epoch": 2.1484593837535013,
"grad_norm": 1.004749059677124,
"learning_rate": 3.198570754936004e-05,
"loss": 0.1883,
"step": 767
},
{
"epoch": 2.1512605042016806,
"grad_norm": 0.8928613066673279,
"learning_rate": 3.194197928963396e-05,
"loss": 0.164,
"step": 768
},
{
"epoch": 2.15406162464986,
"grad_norm": 2.246786594390869,
"learning_rate": 3.189822800463742e-05,
"loss": 0.1953,
"step": 769
},
{
"epoch": 2.156862745098039,
"grad_norm": 1.1755541563034058,
"learning_rate": 3.1854453839485385e-05,
"loss": 0.1598,
"step": 770
},
{
"epoch": 2.1596638655462184,
"grad_norm": 0.8687044382095337,
"learning_rate": 3.1810656939368744e-05,
"loss": 0.0719,
"step": 771
},
{
"epoch": 2.1624649859943976,
"grad_norm": 1.5823578834533691,
"learning_rate": 3.176683744955377e-05,
"loss": 0.1969,
"step": 772
},
{
"epoch": 2.165266106442577,
"grad_norm": 0.6787047982215881,
"learning_rate": 3.172299551538164e-05,
"loss": 0.1161,
"step": 773
},
{
"epoch": 2.168067226890756,
"grad_norm": 2.3936171531677246,
"learning_rate": 3.167913128226803e-05,
"loss": 0.2243,
"step": 774
},
{
"epoch": 2.1708683473389354,
"grad_norm": 0.9804347157478333,
"learning_rate": 3.1635244895702524e-05,
"loss": 0.0667,
"step": 775
},
{
"epoch": 2.1736694677871147,
"grad_norm": 1.6783931255340576,
"learning_rate": 3.159133650124821e-05,
"loss": 0.177,
"step": 776
},
{
"epoch": 2.176470588235294,
"grad_norm": 1.7065593004226685,
"learning_rate": 3.1547406244541175e-05,
"loss": 0.2228,
"step": 777
},
{
"epoch": 2.179271708683473,
"grad_norm": 1.4685626029968262,
"learning_rate": 3.150345427129002e-05,
"loss": 0.212,
"step": 778
},
{
"epoch": 2.1820728291316525,
"grad_norm": 1.0432506799697876,
"learning_rate": 3.145948072727535e-05,
"loss": 0.1374,
"step": 779
},
{
"epoch": 2.184873949579832,
"grad_norm": 1.9101930856704712,
"learning_rate": 3.1415485758349346e-05,
"loss": 0.2451,
"step": 780
},
{
"epoch": 2.1876750700280114,
"grad_norm": 1.068935513496399,
"learning_rate": 3.137146951043524e-05,
"loss": 0.1353,
"step": 781
},
{
"epoch": 2.1904761904761907,
"grad_norm": 0.9940873980522156,
"learning_rate": 3.132743212952684e-05,
"loss": 0.0726,
"step": 782
},
{
"epoch": 2.19327731092437,
"grad_norm": 1.092490315437317,
"learning_rate": 3.128337376168805e-05,
"loss": 0.174,
"step": 783
},
{
"epoch": 2.196078431372549,
"grad_norm": 0.75323885679245,
"learning_rate": 3.123929455305239e-05,
"loss": 0.1166,
"step": 784
},
{
"epoch": 2.1988795518207285,
"grad_norm": 1.346554160118103,
"learning_rate": 3.11951946498225e-05,
"loss": 0.1646,
"step": 785
},
{
"epoch": 2.2016806722689077,
"grad_norm": 0.9711296558380127,
"learning_rate": 3.115107419826966e-05,
"loss": 0.1301,
"step": 786
},
{
"epoch": 2.204481792717087,
"grad_norm": 0.9502242207527161,
"learning_rate": 3.1106933344733304e-05,
"loss": 0.1036,
"step": 787
},
{
"epoch": 2.2072829131652663,
"grad_norm": 1.8469451665878296,
"learning_rate": 3.106277223562054e-05,
"loss": 0.2247,
"step": 788
},
{
"epoch": 2.2100840336134455,
"grad_norm": 0.8027406334877014,
"learning_rate": 3.101859101740564e-05,
"loss": 0.1404,
"step": 789
},
{
"epoch": 2.212885154061625,
"grad_norm": 1.0038503408432007,
"learning_rate": 3.0974389836629624e-05,
"loss": 0.1889,
"step": 790
},
{
"epoch": 2.215686274509804,
"grad_norm": 2.352829933166504,
"learning_rate": 3.0930168839899666e-05,
"loss": 0.2958,
"step": 791
},
{
"epoch": 2.2184873949579833,
"grad_norm": 0.6628269553184509,
"learning_rate": 3.088592817388869e-05,
"loss": 0.1324,
"step": 792
},
{
"epoch": 2.2212885154061626,
"grad_norm": 0.7565025091171265,
"learning_rate": 3.084166798533489e-05,
"loss": 0.1405,
"step": 793
},
{
"epoch": 2.224089635854342,
"grad_norm": 0.8182944655418396,
"learning_rate": 3.079738842104115e-05,
"loss": 0.1145,
"step": 794
},
{
"epoch": 2.226890756302521,
"grad_norm": 0.6144469380378723,
"learning_rate": 3.075308962787466e-05,
"loss": 0.1457,
"step": 795
},
{
"epoch": 2.2296918767507004,
"grad_norm": 1.0344878435134888,
"learning_rate": 3.0708771752766394e-05,
"loss": 0.118,
"step": 796
},
{
"epoch": 2.2324929971988796,
"grad_norm": 0.6334454417228699,
"learning_rate": 3.06644349427106e-05,
"loss": 0.137,
"step": 797
},
{
"epoch": 2.235294117647059,
"grad_norm": 1.0241148471832275,
"learning_rate": 3.062007934476433e-05,
"loss": 0.073,
"step": 798
},
{
"epoch": 2.238095238095238,
"grad_norm": 1.3902757167816162,
"learning_rate": 3.057570510604696e-05,
"loss": 0.1635,
"step": 799
},
{
"epoch": 2.2408963585434174,
"grad_norm": 1.1507909297943115,
"learning_rate": 3.05313123737397e-05,
"loss": 0.1311,
"step": 800
}
],
"logging_steps": 1,
"max_steps": 1785,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": false
},
"attributes": {}
}
},
"total_flos": 192309268647936.0,
"train_batch_size": 32,
"trial_name": null,
"trial_params": null
}